Adding Background Music on iOS
I've been so bored at home lately that I scroll Douyin (TikTok) all day. On a whim, I decided to dig into how it adds background music. This post overlays two audio clips into a single one (mixing them simultaneously rather than joining them end to end; the two approaches are very similar), which is essentially how background music works.
Enough talk, let's go straight to the code!
- (void)audioComposition {
    // 1. Load the two local audio files.
    NSString *audioPath1 = [[NSBundle mainBundle] pathForResource:@"1" ofType:@"m4a"];
    NSString *audioPath2 = [[NSBundle mainBundle] pathForResource:@"2" ofType:@"m4a"];
    AVURLAsset *audioAsset1 = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:audioPath1]];
    AVURLAsset *audioAsset2 = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:audioPath2]];

    // 2. Create a composition with two audio tracks, and grab the source track of each asset.
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *audioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                       preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack2 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                       preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *audioAssetTrack1 = [[audioAsset1 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    AVAssetTrack *audioAssetTrack2 = [[audioAsset2 tracksWithMediaType:AVMediaTypeAudio] firstObject];

    // 3. Insert both clips into the composition.
    // Passing kCMTimeZero for atTime: overlays the two clips so they play simultaneously.
    // Passing the first clip's duration (audioAsset1.duration) instead would append the
    // second clip after the end of the first.
    [audioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset1.duration)
                         ofTrack:audioAssetTrack1
                          atTime:kCMTimeZero
                           error:nil];
    [audioTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset2.duration)
                         ofTrack:audioAssetTrack2
                          atTime:kCMTimeZero
                           error:nil];

    // 4. Export the mixed audio.
    // The presetName must match the outputFileType set below.
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                     presetName:AVAssetExportPresetAppleM4A];
    NSString *outputFilePath = [[self.exportUrl stringByDeletingLastPathComponent]
                                stringByAppendingPathComponent:@"exportUrl.m4a"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    }
    // Log the file types this session supports, and the destination path.
    NSLog(@"supported file types: %@", [session supportedFileTypes]);
    NSLog(@"output path: %@", outputFilePath);
    session.outputURL = [NSURL fileURLWithPath:outputFilePath]; // export and playback must use the same path
    session.outputFileType = AVFileTypeAppleM4A;                // must match the preset above
    session.shouldOptimizeForNetworkUse = YES;                  // lay out the file for progressive network playback

    [session exportAsynchronouslyWithCompletionHandler:^{
        switch (session.status) {
            case AVAssetExportSessionStatusUnknown:
                NSLog(@"AVAssetExportSessionStatusUnknown");
                break;
            case AVAssetExportSessionStatusWaiting:
                NSLog(@"AVAssetExportSessionStatusWaiting");
                break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"AVAssetExportSessionStatusExporting");
                break;
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"AVAssetExportSessionStatusCompleted");
                // Play the exported file to verify the mix.
                _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:outputFilePath]
                                                                      error:nil];
                [_audioPlayer play];
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed: %@", session.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"AVAssetExportSessionStatusCancelled");
                break;
            default:
                break;
        }
    }];
}
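The method above relies on a self.exportUrl property that the post never shows being set; it only uses it to derive a writable directory for the exported file. A minimal, hypothetical setup (the property names exportUrl and audioPlayer are assumptions, not shown in the original) could point it at the temporary directory and then kick off the export:

// Assumed elsewhere in the class (not shown in the post):
//   @property (nonatomic, copy)   NSString *exportUrl;        // full path used to derive the output directory
//   @property (nonatomic, strong) AVAudioPlayer *audioPlayer; // backs the _audioPlayer ivar used above
self.exportUrl = [NSTemporaryDirectory() stringByAppendingPathComponent:@"exportUrl.m4a"];
[self audioComposition];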
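The comment in step 3 mentions the other option: joining the clips end to end instead of overlaying them. In that case a single composition track is enough, and the second insertion starts at the first clip's duration. A rough sketch, reusing the same bundled "1.m4a" and "2.m4a" files and assuming AVFoundation is imported as in the code above (the helper name sequentialComposition is made up for illustration):

// Sketch: concatenate two clips back to back instead of mixing them.
- (AVMutableComposition *)sequentialComposition {
    AVURLAsset *audioAsset1 = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:
        [[NSBundle mainBundle] pathForResource:@"1" ofType:@"m4a"]]];
    AVURLAsset *audioAsset2 = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:
        [[NSBundle mainBundle] pathForResource:@"2" ofType:@"m4a"]]];

    AVMutableComposition *composition = [AVMutableComposition composition];
    // One track is enough because the clips never overlap in time.
    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *error = nil;
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset1.duration)
                   ofTrack:[[audioAsset1 tracksWithMediaType:AVMediaTypeAudio] firstObject]
                    atTime:kCMTimeZero
                     error:&error];
    // Start the second clip exactly where the first one ends.
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset2.duration)
                   ofTrack:[[audioAsset2 tracksWithMediaType:AVMediaTypeAudio] firstObject]
                    atTime:audioAsset1.duration
                     error:&error];
    return composition; // export it with the same AVAssetExportSession code as above
}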
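One more thing worth knowing, though it is not in the code above: when one of the clips is meant as background music, you usually want it quieter than the other. AVAssetExportSession accepts an AVAudioMix for that. A sketch, assuming audioTrack2 from step 2 is the background track and 0.3 is just an arbitrary level:

// Lower the background track's volume before exporting (sketch only).
AVMutableAudioMixInputParameters *bgParams =
    [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack2];
[bgParams setVolume:0.3f atTime:kCMTimeZero];

AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = @[bgParams];
session.audioMix = audioMix; // set before calling exportAsynchronouslyWithCompletionHandler: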