使用AVFoundation Swift保存视频

18

嗨,我按照Jared Davidson的课程创建了一个自定义相机视图,并使用AVFoundation保存图片。

https://www.youtube.com/watch?v=w0O3ZGUS3pk

然而,我想录制和保存视频而不是图片。有人可以帮助我吗?我确定这很简单,但苹果的文档是用Objective-C编写的,我无法解读它。

这是我的代码。谢谢。

import UIKit
import AVFoundation

class ViewController: UIViewController {

    // Capture pipeline (Swift 2 era API). AVCaptureStillImageOutput only
    // produces photos — recording video requires AVCaptureMovieFileOutput,
    // which is exactly what this code is missing (see the answers below).
    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()


    @IBOutlet var cameraView: UIView!

    // Configures the session each time the view is about to appear:
    // front camera in, JPEG still-image output, live preview layer.
    override func viewWillAppear(animated: Bool) {

        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            // Only the front-facing camera is used.
            if device.position == AVCaptureDevicePosition.Front{


                do{

                    // AVCaptureDeviceInput(device:) throws if the camera is unavailable.
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)

                    if captureSession.canAddInput(input){

                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput){

                            captureSession.addOutput(sessionOutput)
                            captureSession.startRunning()

                            // Live preview, aspect-filled and centered inside cameraView.
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame


                        }

                    }

                }
                catch{

                    // NOTE(review): this swallows the thrown error; printing
                    // `error` itself would make failures diagnosable.
                    print("Error")
                }

            }
        }    

    }


    // Captures a single still frame from the video connection and writes
    // the resulting JPEG to the user's photo library.
    @IBAction func TakePhoto(sender: AnyObject) {

        if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo){

            sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
                buffer, error in

                // Sample buffer -> JPEG NSData -> UIImage -> photo album.
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
                UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)

            })

        }

    }

}
7个回答

25

通过创建并添加 AVCaptureMovieFileOutput 到您的捕获会话中,并让您的 ViewController 符合 AVCaptureFileOutputRecordingDelegate 来将视频保存记录到文件。

本示例将录制 5 秒钟的视频并保存至名为 "output.mov" 的文件中,该文件位于应用的文档目录中。

// Records 5 seconds of front-camera video into "output.mov" in the app's
// Documents directory, then copies it to the photo library when recording
// finishes (Swift 2 era AVFoundation API).
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        self.cameraView = self.view

        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Front{

                do{
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)

                    if captureSession.canAddInput(input){

                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput){

                            captureSession.addOutput(sessionOutput)

                            // Live preview, aspect-filled and centered in cameraView.
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }

                        // FIX: guard the movie output with canAddOutput, matching
                        // the checks used for the input and still-image output above.
                        // The original added it unconditionally, which raises an
                        // exception if the session cannot accept the output.
                        if captureSession.canAddOutput(movieOutput){
                            captureSession.addOutput(movieOutput)
                        }

                        captureSession.startRunning()

                        // Record into Documents/output.mov; remove any stale file
                        // first because recording fails if the target exists.
                        let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
                        let fileUrl = paths[0].URLByAppendingPathComponent("output.mov")
                        try? NSFileManager.defaultManager().removeItemAtURL(fileUrl)
                        movieOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)

                        // Stop automatically after 5 seconds.
                        let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(5 * Double(NSEC_PER_SEC)))
                        dispatch_after(delayTime, dispatch_get_main_queue()) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }

                }
                catch{
                    print("Error")
                }

            }
        }

    }

    // AVCaptureFileOutputRecordingDelegate: called once the movie file has
    // been written; copy it into the user's photo library on success.
    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil)
        }
    }

}

2
您可以在 Xcode > 设备 > iPhone > 应用程序名称 > 下载容器 中从设备下载“文档”目录。您可以通过从 didFinishRecordingToOutputFileAtURL 代理方法调用 UISaveVideoAtPathToSavedPhotosAlbum() 来将视频保存到照片库中。如果您的问题得到解答,请接受此答案。 - Rhythmic Fistman
1
哈哈,非常感谢!希望这也能帮助其他人,因为我看到很多人在问。 - Kervon Ryan

9

非常感谢您的帮助,对我很有用。这里是 Rhythmic Fistman 的答案移植到 Swift 3 的版本,并补上了所需的 import 声明和委托方法。

import UIKit
import AVFoundation

// Swift 3 port of the accepted answer: records 5 seconds of front-camera
// video into Documents/output.mov and saves it to the photo library via
// the AVCaptureFileOutputRecordingDelegate callbacks.
class ViewController: UIViewController,
AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(_ animated: Bool) {
        self.cameraView = self.view

        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in devices! {
            if (device as AnyObject).position == AVCaptureDevicePosition.front{

                do{
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)

                    if captureSession.canAddInput(input){

                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput){

                            captureSession.addOutput(sessionOutput)

                            // Live preview, aspect-filled and centered in cameraView.
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }

                        // FIX: guard the movie output with canAddOutput, matching
                        // the other add calls (the original added it unconditionally,
                        // which raises if the session refuses the output).
                        if captureSession.canAddOutput(movieOutput){
                            captureSession.addOutput(movieOutput)
                        }

                        captureSession.startRunning()

                        // Record into Documents/output.mov; remove any stale file
                        // first because recording fails if the target exists.
                        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
                        let fileUrl = paths[0].appendingPathComponent("output.mov")
                        try? FileManager.default.removeItem(at: fileUrl)
                        movieOutput.startRecording(toOutputFileURL: fileUrl, recordingDelegate: self)

                        // Stop automatically after 5 seconds.
                        let delayTime = DispatchTime.now() + 5
                        DispatchQueue.main.asyncAfter(deadline: delayTime) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }

                }
                catch{
                    print("Error")
                }

            }
        }

    }

    //MARK: AVCaptureFileOutputRecordingDelegate Methods

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

    }

    // Called once the movie file has been written; copy it into the
    // user's photo library on success.
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
        }
    }

}

视频录制并保存在库中没问题,但声音没有被录制。请帮帮我。 - nitin.agam
在添加AVCaptureMovieFileOutput()预览层之后,停止在视图上显示视频了。对此有什么建议吗? - Qazi Ammar

3

您可以使用此代码将视频保存到照片库中。您需要提供以下参数,其中最重要的是 outputFileURL.path,即要保存到相机胶卷的影片文件在文件系统中的路径。其余参数可以传入相应的值,也可以根据需要传 nil。

// Recording-finished callback: on success, hand the movie file at
// outputFileURL.path to the photo library; on failure, log the error.
// The local outputURL reference is cleared on every exit path.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    defer { outputURL = nil }

    guard error == nil else {
        print("Error recording movie: \(error!.localizedDescription)")
        return
    }

    // The selector receives the save result asynchronously.
    UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, #selector(CameraController.video(_:didFinishSavingWithError:contextInfo:)), nil)
}

嗨,请避免仅回答代码。请在您的答案中添加解释,谢谢。 - Leviand

0

关于声音录制问题,

在创建captureSession时添加以下代码:

askMicroPhonePermission(completion: { (isMicrophonePermissionGiven) in

            if isMicrophonePermissionGiven {
                do {
                    try self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureAudio))
                } catch {
                    print("Error creating the database")
                }
            }
        })

////////////////////////////////////////////////////////////////

askMicroPhonePermission函数如下

/// Checks microphone recording permission, prompting the user if it has
/// not been decided yet, and reports the outcome through `completion`.
func askMicroPhonePermission(completion: @escaping (_ success: Bool)-> Void) {
    let session = AVAudioSession.sharedInstance()
    switch session.recordPermission() {
    case AVAudioSessionRecordPermission.granted:
        completion(true)
    case AVAudioSessionRecordPermission.denied:
        // Previously refused; the caller may show an alert if required.
        completion(false)
    case AVAudioSessionRecordPermission.undetermined:
        // First run: ask the system to present the permission prompt.
        session.requestRecordPermission { granted in
            completion(granted)
        }
    default:
        completion(false)
    }
}

你需要在info.plist文件中添加NSMicrophoneUsageDescription键值对。

0

针对委托未被调用的人,这里只有一个小提示:(我使用的是 Xcode 12.x / iOS 14.5)

假设您有一个打开本地目标 URL 的函数(代码来自之前的示例...文档目录...)

如果您执行以下操作:

DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
                    self.captureSession.startRunning()
                    self.addDest()    
                }

它可以工作,

但如果你交换:

DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
                    self.addDest()
                    self.captureSession.startRunning()   

                }

委托未被调用。

在设置输出后调用了“startRunning”:

if captureSession.canAddInput(input!) && captureSession.canAddOutput(stillImageOutput) {
            captureSession.addInput(input!)
            captureSession.addOutput(stillImageOutput)
            setupLivePreview()
            self.videoPreviewLayer.frame = self.previewView.bounds
            
            if addVideoOutput(){
                DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
                    self.captureSession.startRunning()
                    
                    self.addDest()

                }
                
            }

...

在哪里

 // Creates a fresh movie-file output and attaches it to the session.
 // Returns true only if the session accepted the output.
 func addVideoOutput() -> Bool{
        movieOutput = AVCaptureMovieFileOutput()
        guard captureSession.canAddOutput(movieOutput) else {
            return false
        }
        captureSession.addOutput(movieOutput)
        return true
    }
  

0

在 if (device as AnyObject).position == AVCaptureDevicePosition.front{ 这一行之后

加入以下代码:

// Audio Input
                let audioInputDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

                do
                {
                    let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)

                    // Add Audio Input
                    if captureSession.canAddInput(audioInput)
                    {
                        captureSession.addInput(audioInput)
                    }
                    else
                    {
                        NSLog("Can't Add Audio Input")
                    }
                }
                catch let error
                {
                    NSLog("Error Getting Input Device: \(error)")
                }

谢谢


0
/// Extracts a thumbnail frame (at the 1-second mark) from the video at
/// the given URL string, or returns nil if the frame cannot be generated.
/// - Parameter url: String form of the video's URL.
/// - Returns: The extracted frame as a UIImage, or nil on failure.
func getCurrentFrame(url:String) -> UIImage? {
    // FIX: the original force-unwrapped URL(string:)!, crashing on a
    // malformed string even though the function already returns an optional.
    guard let videoURL = URL(string: url) else {
        return nil
    }
    let asset = AVAsset(url: videoURL)
    let assetImgGenerate = AVAssetImageGenerator(asset: asset)
    // Respect the track's preferred transform so the frame is upright.
    assetImgGenerate.appliesPreferredTrackTransform = true
    //Can set this to improve performance if target size is known before hand
    //assetImgGenerate.maximumSize = CGSize(width,height)
    let time = CMTimeMakeWithSeconds(1.0, preferredTimescale: 600)
    do {
        let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
        let thumbnail = UIImage(cgImage: img)
        return thumbnail
    } catch {
      print(error.localizedDescription)
      return nil
    }
}

网页内容由 Stack Overflow 提供,点击上方的原文链接可以查看英文原文。