import AVFoundation

// Merge the recorded audio clips listed in audioLocalUrls into a single .m4a file.
func mergeAudio() {
    let composition = AVMutableComposition()

    for i in 0 ..< audioLocalUrls.count {
        // One composition track per source clip; each clip is appended at the
        // current end of the composition.
        let compositionAudioTrack: AVMutableCompositionTrack =
            composition.addMutableTrack(withMediaType: AVMediaTypeAudio,
                                        preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(url: URL(fileURLWithPath: audioLocalUrls[i]))
        let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600),
                                    duration: track.timeRange.duration)
        try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
    }

    // Build an output URL in the Documents directory with a randomized file name.
    let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory,
                                                        in: .userDomainMask).first!
    let stringDate = BaseController().getCurrentTime()
    self.mergeAudioURL = documentDirectoryURL
        .appendingPathComponent(BaseController().randomMD5(str: stringDate) + ".m4a")

    // Export the composition as an AAC (.m4a) file.
    let assetExport = AVAssetExportSession(asset: composition,
                                           presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = mergeAudioURL
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case .failed:
            print("failed \(assetExport?.error)")
        case .cancelled:
            print("cancelled \(assetExport?.error)")
        case .unknown:
            print("unknown \(assetExport?.error)")
        case .waiting:
            print("waiting \(assetExport?.error)")
        case .exporting:
            print("exporting \(assetExport?.error)")
        default:
            // Export completed: delete the original source clips.
            print("success")
            let fileManager = FileManager.default
            for i in 0 ..< self.audioLocalUrls.count {
                try! fileManager.removeItem(at: URL(fileURLWithPath: self.audioLocalUrls[i]))
            }
        }
    })
}
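The function above uses the Swift 3-era constant names (AVMediaTypeAudio, AVFileTypeAppleM4A) and force-unwraps throughout. For reference, here is a minimal sketch of the same merge written against the current AVFoundation names (AVMediaType.audio, AVFileType.m4a, the .completed export status) with the errors propagated instead of crashed on. The names mergeAudioFiles, inputURLs, and outputURL are hypothetical and introduced only for this sketch; they are not part of the original project.

import AVFoundation

// Sketch: merge several local audio files into one .m4a at outputURL.
// inputURLs and outputURL are placeholders supplied by the caller.
func mergeAudioFiles(inputURLs: [URL], outputURL: URL,
                     completion: @escaping (Error?) -> Void) {
    let composition = AVMutableComposition()
    guard let compositionTrack = composition.addMutableTrack(withMediaType: .audio,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid) else {
        completion(NSError(domain: "mergeAudio", code: -1))
        return
    }
    do {
        for url in inputURLs {
            let asset = AVURLAsset(url: url)
            guard let assetTrack = asset.tracks(withMediaType: .audio).first else { continue }
            let range = CMTimeRange(start: .zero, duration: assetTrack.timeRange.duration)
            // Append each clip at the current end of the composition.
            try compositionTrack.insertTimeRange(range, of: assetTrack, at: composition.duration)
        }
    } catch {
        completion(error)
        return
    }
    guard let exporter = AVAssetExportSession(asset: composition,
                                              presetName: AVAssetExportPresetAppleM4A) else {
        completion(NSError(domain: "mergeAudio", code: -2))
        return
    }
    exporter.outputFileType = .m4a
    exporter.outputURL = outputURL
    exporter.exportAsynchronously {
        completion(exporter.status == .completed ? nil : exporter.error)
    }
}

A caller would pass the recorded clip URLs plus a destination file in the Documents directory, and decide in the completion closure whether to delete the source clips once the error is nil.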