在Swift中连接两个音频文件并播放它们

8
我试着在Swift中连接.wav音频文件。
这是我的代码:
func merge(audio1: NSURL, audio2:  NSURL) {


    var error:NSError?

    var ok1 = false
    var ok2 = false


    var documentsDirectory:String = paths[0] as! String

    //Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
    var composition = AVMutableComposition()
    var compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    var compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

    //create new file to receive data
    var documentDirectoryURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first! as! NSURL
    var fileDestinationUrl = documentDirectoryURL.URLByAppendingPathComponent("resultmerge.wav")
    println(fileDestinationUrl)


    var url1 = audio1
    var url2 = audio2


    var avAsset1 = AVURLAsset(URL: url1, options: nil)
    var avAsset2 = AVURLAsset(URL: url2, options: nil)

    var tracks1 =  avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    var tracks2 =  avAsset2.tracksWithMediaType(AVMediaTypeAudio)

    var assetTrack1:AVAssetTrack = tracks1[0] as! AVAssetTrack
    var assetTrack2:AVAssetTrack = tracks2[0] as! AVAssetTrack


    var duration1: CMTime = assetTrack1.timeRange.duration
    var duration2: CMTime = assetTrack2.timeRange.duration

    var timeRange1 = CMTimeRangeMake(kCMTimeZero, duration1)
    var timeRange2 = CMTimeRangeMake(duration1, duration2)


    ok1 = compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero, error: nil)
    if ok1 {

        ok2 = compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1, error: nil)

        if ok2 {
            println("success")
        }
    }

    //AVAssetExportPresetPassthrough => concatenation
    var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
    assetExport.outputFileType = AVFileTypeWAVE
    assetExport.outputURL = fileDestinationUrl
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case  AVAssetExportSessionStatus.Failed:
            println("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            println("cancelled \(assetExport.error)")
        default:
            println("complete")
            var audioPlayer = AVAudioPlayer()
            audioPlayer = AVAudioPlayer(contentsOfURL: fileDestinationUrl, error: nil)
            audioPlayer.prepareToPlay()
            audioPlayer.play()
        }

    })

}

在终端上(在 iPhone 上运行)收到此错误:
file:///var/mobile/Containers/Data/Application/3F49D360-B363-4600-B3BB-EE0810501910/Documents/resultmerge.wav
成功
失败,错误域名为 AVFoundationErrorDomain,代码为 -11838,“操作已中止”,用户信息为 0x174269ac0 {NSLocalizedDescription=Opération interrompue, NSLocalizedFailureReason=L’opération n’est pas prise en charge pour ce contenu multimédia.}
但我不知道为什么会出现这个错误。非常感谢您能给我任何帮助 :)

谢谢您的建议 :) 不幸的是,我使用 assetExport.outputFileType = AVFileTypeAppleM4A 仍然遇到了相同的错误。 - Pierre Louis Bresson
我们知道有没有办法让一个文件比另一个文件更响亮? - swiftyboi
7个回答

11

针对 Swift 3.0 - 由 @Peyman 贡献 (经过轻微修改)

var mergeAudioURL = NSURL()

func mergeAudioFiles(audioFileUrls: NSArray) {
    let composition = AVMutableComposition()

    for i in 0 ..< audioFileUrls.count {

        let compositionAudioTrack :AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

        let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)

        let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]

        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)

        try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
    }

    let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
    self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("FinalAudio.m4a")! as URL as NSURL

    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = mergeAudioURL as URL
    assetExport?.exportAsynchronously(completionHandler:
        {
            switch assetExport!.status
            {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error)")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error)")
            case AVAssetExportSessionStatus.unknown:
                print("unknown\(assetExport?.error)")
            case AVAssetExportSessionStatus.waiting:
                print("waiting\(assetExport?.error)")
            case AVAssetExportSessionStatus.exporting:
                print("exporting\(assetExport?.error)")
            default:
                print("Audio Concatenation Complete")
            }
    })
}

6

我通过两个变化使你的代码工作:

  • 预设名称:从AVAssetExportPresetPassthrough更改为AVAssetExportPresetAppleM4A

  • 输出文件类型:从AVFileTypeWAVE更改为AVFileTypeAppleM4A

像这样修改你的assetExport声明:

var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport.outputFileType = AVFileTypeAppleM4A

然后它将正确合并文件。

看起来AVAssetExportSession只能导出M4A格式,并忽略其他预设。可能有一种方法可以使其导出其他格式(通过子类化它?),尽管我还没有探索过这种可能性。


好的,它运行了一次。如果我再做一遍,就会遇到之前的相同问题。确实,如果我删除这个应用程序,它又能再次工作一次,然后停止工作。 - Pierre Louis Bresson
我又遇到了“失败错误域=AVFoundationErrorDomain Code=-11838...”错误。虽然我不知道为什么会出现这个问题,但我发现通过删除旧的音频文件可以解决这个问题。 - Pierre Louis Bresson
我认真地认为,这第二次发生的错误与音频转换本身无关,而是与您的文件管理有关... - Eric Aya
哎呀,刚看到你的新编辑/答案。感谢发布这个澄清。 :) - Eric Aya
我们知道有没有办法让一个文件比另一个文件更响亮? - swiftyboi
有没有一种方法可以合并文件,而不将音频转换为压缩的m4a格式?我想处理附加的音频文件,以便进行需要原始格式的某些操作。 - grehce

6

我需要合并多个音频文件,所以我重写了这个函数,使其能够接受一个NSURL数组。我想在这里分享一下。

我是Swift的新手,请提供反馈。

归功于:@Eric D. @Pierre Louis Bresson

以下是代码:

func mergeAudioFiles(audioFileUrls: NSArray, callback: (url: NSURL?, error: NSError?)->()) {

// Create the audio composition
let composition = AVMutableComposition()

// Merge
for (var i = 0; i < audioFileUrls.count; i++) {

    let compositionAudioTrack :AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

    let asset = AVURLAsset(URL: audioFileUrls[i] as! NSURL)

    let track = asset.tracksWithMediaType(AVMediaTypeAudio)[0]

    let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)

    try! compositionAudioTrack.insertTimeRange(timeRange, ofTrack: track, atTime: composition.duration)
}

// Create output url
let format = NSDateFormatter()
format.dateFormat="yyyy-MM-dd-HH-mm-ss"
let currentFileName = "recording-\(format.stringFromDate(NSDate()))-merge.m4a"
print(currentFileName)

let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let outputUrl = documentsDirectory.URLByAppendingPathComponent(currentFileName)

// Export it
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport?.outputFileType = AVFileTypeAppleM4A
assetExport?.outputURL = outputUrl

assetExport?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    switch assetExport!.status {
        case AVAssetExportSessionStatus.Failed:
            callback(url: nil, error: assetExport?.error)
        default:
            callback(url: assetExport?.outputURL, error: nil)
    }
})

}

1
这是代码,需要放置在println(fileDestinationUrl)之后:
var file = "resultmerge.m4a"
var dirs : [String] = (NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.AllDomainsMask, true) as? [String])!
var dir = dirs[0] //documents directory
var path = dir.stringByAppendingPathComponent(file)
var pathURLarray:Array = (NSURL(fileURLWithPath: path)!).pathComponents!
var pathURL:String = ""
var final = ""
var debut = ""

for i in 1...(pathURLarray.count-1) {
if i == pathURLarray.count-1 {
final = ""
} else {
final = "/"
}
if i == 1 {
debut = "/"
} else {
debut = ""
}
pathURL = debut + pathURL + (pathURLarray[i] as! String) + final
}

var checkValidation = NSFileManager.defaultManager()
if checkValidation.fileExistsAtPath(pathURL) {
    println("file exist")
    if NSFileManager.defaultManager().removeItemAtURL(fileDestinationUrl, error: nil) {
        println("delete")
    }
} else {
    println("no file")
}

通过这个和 @Eric D. 的答案,它正在工作。

0

为了帮助其他人“连接”,我也进行了更改

var timeRange2 = CMTimeRangeMake(duration1, duration2)

进入

var timeRange2 = CMTimeRangeMake(kCMTimeZero, duration2)

(除了 @Eric D. @Pierre Louis Bresson 的代码之外)。

0

针对 Swift 2.1:

class func getDocumentsDirectory() -> NSString {
    let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true) as [String]
    let documentsDirectory = paths[0]
    return documentsDirectory
}

class func getFileURL() -> NSURL {
    let audioFilename = getDocumentsDirectory().stringByAppendingPathComponent("\(NSDate()).m4a")
    let audioURL = NSURL(fileURLWithPath: audioFilename)

    return audioURL
}

func merge(audio1: NSURL, audio2:  NSURL) {

    finalURL = ProcessViewController.getFileURL()

    let preferredTimeScale : Int32 = 100

    //This object will be edited to include both audio files
    let composition = AVMutableComposition()

    //Song 1 setup
    let compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let url1 = audio1
    let avAsset1 = AVURLAsset(URL: url1, options: nil)
    let tracks1 =  avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack1:AVAssetTrack = tracks1[0]
    let duration1: CMTime = CMTimeMakeWithSeconds(trimmedLength1, preferredTimeScale)
    let startCMTime = CMTimeMakeWithSeconds(Double(startTime1), preferredTimeScale)
    let timeRange1 = CMTimeRangeMake(startCMTime, duration1)

    //Song 2 setup
    let compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let url2 = audio2
    let avAsset2 = AVURLAsset(URL: url2, options: nil)
    let tracks2 =  avAsset2.tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack2:AVAssetTrack = tracks2[0]
    let duration2: CMTime = CMTimeMakeWithSeconds(trimmedLength2, preferredTimeScale)
    let startCMTime2 = CMTimeMakeWithSeconds(Double(startTime2), preferredTimeScale)
    let timeRange2 = CMTimeRangeMake(startCMTime, duration1)


    //Insert the tracks into the composition
    do {

        try compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero)

        try compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1)


    } catch {

        print(error)

    }

    //Perform the merge
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport!.outputFileType = AVFileTypeAppleM4A
    assetExport!.outputURL = finalURL
    assetExport!.exportAsynchronouslyWithCompletionHandler({
        switch assetExport!.status{
        case  AVAssetExportSessionStatus.Failed:
            print("failed \(assetExport!.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("cancelled \(assetExport!.error)")
        default:
            print("complete")

            self.initializeAudioPlayer()

        }

    })

}

0

另外,如果你想要它同步处理,只需添加一个调度程序即可

var errorExport = true
    //AVAssetExportPresetPassthrough => concatenation
    var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport.outputFileType =  AVFileTypeAppleM4A
    assetExport.outputURL = fileDestinationUrl
    let sessionWaitSemaphore = dispatch_semaphore_create(0)

    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case  AVAssetExportSessionStatus.Failed:

           println("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:

          println("cancelled \(assetExport.error)")
        default:
            //
            errorExport = false
        }

        dispatch_semaphore_signal(sessionWaitSemaphore)
        return Void()

    })

    dispatch_semaphore_wait(sessionWaitSemaphore, DISPATCH_TIME_FOREVER)

    return errorExport

网页内容由stack overflow 提供, 点击上面的
可以查看英文原文,
原文链接