最近接触英语学习类app,根据视频跟读句子,完成配音作业,然后将自己的多个录音和原视频合成为一个新的视频。
之前看过几篇类似的博客,都是写一个视频和一个音频合成,要么是一个视频去掉音频,提取音频之类的。但是都没有多个音频根据时间节点拼接到原有的视频中去,或者理解为替换该节点的音频。【直接复制下面代码即可使用】
不明白的地方欢迎➕抠:【3376512101】
一、思路
第一步:分别提取原视频的音频(Ymp3)和视频(Ymp4);
第二步:将自己的多个录音根据时间节点依次合成到提取的原音频(Ymp3)中去,最后合成为一个新的音频(Xmp3);
第三步:将合成的新音频(Xmp3)和提取的原视频(Ymp4)重新合成为一个新的视频(Perfect.mp4);
二、代码
//
// HJMergeVideoWithMusic.h
// boxfairy
//
// Created by Yujiao on 2019/3/28.
// Copyright © 2019 Yujiao. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface HJMergeVideoWithMusic : NSObject
/**
 Adds background music to a video that has no audio track of its own.
 @param musicPath Path of the background-music file.
 @param videoPath Path of the source video file.
 @param savePath Destination path for the merged video.
 @param successBlock Called with the output URL when merging succeeds.
 */
+ (void)mergeVideoWithMusic:(NSString *)musicPath noBgMusicVideo:(NSString *)videoPath saveVideoPath:(NSString *)savePath success:(void(^)(NSURL *recordPath))successBlock;
// Extracts the original video's audio and mixes the given clips into it.
/**
 Merges multiple audio clips with a video.
 @param musicArrayPath Audio file paths (several clips may be supplied so each
        can replace the audio of a particular segment).
 @param musicStartTimes Start time of each clip within the video (one entry
        per clip; each entry's schedule.enterTime is read — see the .m).
 @param videoPath Source video path.
 @param savePath Destination path for the merged video.
 @param successBlock Called with the output URL when merging succeeds.
 */
+ (void)mergeVideoWithMusicArray:(NSArray *)musicArrayPath musicStartTime:(NSArray *)musicStartTimes video:(NSString *)videoPath saveVideoPath:(NSString *)savePath success:(void(^)(NSURL *recordPath))successBlock;
@end
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^【.m文件】^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
//
// HJMergeVideoWithMusic.m
// boxfairy
//
// Created by Yujiao on 2019/3/28.
// Copyright © 2019 Yujiao. All rights reserved.
//
#import "HJMergeVideoWithMusic.h"
#import <AVFoundation/AVFoundation.h>
#import "boxfairy-Swift.h"
@implementation HJMergeVideoWithMusic
/**
 Adds a background-music track to a video that has no audio of its own.

 @param musicPath    Path of the background-music file.
 @param videoPath    Path of the source video file.
 @param savePath     Destination path for the merged video.
 @param successBlock Called on the main queue with the output URL on success.
 */
+ (void)mergeVideoWithMusic:(NSString *)musicPath noBgMusicVideo:(NSString *)videoPath saveVideoPath:(NSString *)savePath success:(void(^)(NSURL *recordPath))successBlock{
    NSURL *audioInputUrl = [NSURL fileURLWithPath:musicPath];
    NSURL *videoInputUrl = [NSURL fileURLWithPath:videoPath];
    // Honor the caller-supplied save path. (The original assigned savePath to
    // outputURL, then overwrote it with a hard-coded Documents/video.mp4,
    // silently ignoring the savePath argument.)
    NSURL *outputFileUrl = [NSURL fileURLWithPath:savePath];
    CMTime insertionStart = kCMTimeZero;
    AVMutableComposition *composition = [AVMutableComposition composition];

    // Video track: copy the full video time range into the composition.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    NSError *videoError = nil;
    if (!videoAssetTrack ||
        ![videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:insertionStart error:&videoError]) {
        NSLog(@"Failed to insert video track: %@", videoError);
        return;
    }

    // Audio track: the music is clipped to the video's duration. If the music
    // is shorter than the video this range is too long — callers guarantee it
    // isn't (see original comment about video length).
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    NSError *audioError = nil;
    if (!audioAssetTrack ||
        ![audioTrack insertTimeRange:videoTimeRange ofTrack:audioAssetTrack atTime:insertionStart error:&audioError]) {
        NSLog(@"Failed to insert audio track: %@", audioError);
        return;
    }

    // AVAssetExportSession fails if the output file already exists.
    if ([[NSFileManager defaultManager] fileExistsAtPath:savePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:savePath error:nil];
    }

    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    assetExport.outputURL = outputFileUrl;
    assetExport.shouldOptimizeForNetworkUse = YES;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            // Only report success when the export actually completed; the
            // original fired the callback unconditionally.
            if (assetExport.status == AVAssetExportSessionStatusCompleted) {
                if (successBlock) successBlock(outputFileUrl);
            } else {
                NSLog(@"Video export failed: %@", assetExport.error);
            }
        });
    }];
}
/**
 Mixes multiple recorded audio clips into a video's own audio track, then
 merges the mixed audio back with the video.

 Step 1: extract the video's audio, splice each recording in at its scheduled
 time, and export the mix as an intermediate m4a. Step 2: merge that m4a with
 the original video via theVideoWithMixMusic:videoPath:savePath:success:.

 @param musicArrayPath  Array of audio-file paths (one per recording).
 @param musicStartTimes Array of dictionaries; entry[@"schedule"][@"enterTime"]
                        is the second at which the matching clip starts.
 @param videoPath       Source video path.
 @param savePath        Destination path for the final video.
 @param successBlock    Called on the main queue with the output URL.
 */
+ (void)mergeVideoWithMusicArray:(NSArray *)musicArrayPath musicStartTime:(NSArray *)musicStartTimes video:(NSString *)videoPath saveVideoPath:(NSString *)savePath success:(void(^)(NSURL *recordPath))successBlock {
    // Step 1: mix the video's audio and the recordings into one audio file.
    AVMutableComposition *composition = [AVMutableComposition composition];
    NSMutableArray *audioMixParams = [NSMutableArray array];

    NSURL *videoInputUrl = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoInputUrl options:nil];
    CMTime startTime = CMTimeMakeWithSeconds(0, videoAsset.duration.timescale);
    CMTime trackDuration = videoAsset.duration;

    // Pull the original audio out of the video at low volume (0.2) so the
    // recordings dominate. NOTE(review): the 14-second offset
    // (CMTimeMake(14*44100, 44100)) looks project-specific — confirm; the
    // helper currently does not use its offset parameter.
    AVMutableCompositionTrack *extractedAudioTrack = [self setUpAndAddAudioAtPath:videoInputUrl toComposition:composition start:startTime dura:trackDuration offset:CMTimeMake(14 * 44100, 44100) addAudioParams:audioMixParams setVolume:0.2f];

    // Splice each recording into the extracted track at its scheduled time
    // (this is what replaces segments of the original audio).
    [self setUpAndAddAudioArrayAtPath:musicArrayPath musicStartTime:musicStartTimes toComposition:composition offset:CMTimeMake(0, 44100) addAudioParams:audioMixParams setVolume:0.8f videoTrack:extractedAudioTrack];

    // Collect the per-track volume parameters gathered by the helpers.
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
    exporter.audioMix = audioMix;
    // AVFileTypeAppleM4A is the constant for @"com.apple.m4a-audio".
    exporter.outputFileType = AVFileTypeAppleM4A;

    // Intermediate mix file; removed again after the final merge.
    NSString *exportFile = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/music.m4a"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportFile]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportFile error:nil];
    }
    NSLog(@"输出混音路径===%@",exportFile);
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
    exporter.outputURL = exportURL;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // Only proceed when the mix export actually succeeded; the original
        // failed silently (no callback, no log) on any export error.
        if (exporter.status == AVAssetExportSessionStatusCompleted &&
            [[NSFileManager defaultManager] fileExistsAtPath:exportFile]) {
            NSLog(@"音频混音完毕,开始合成音频、视频");
            // Step 2: merge the mixed audio with the original video.
            [self theVideoWithMixMusic:exportFile videoPath:videoPath savePath:savePath success:successBlock];
        } else {
            NSLog(@"Audio mix export failed: %@", exporter.error);
        }
    }];
}
/**
 Merges a mixed audio file with a video into a single output movie, then
 deletes the intermediate audio file.

 @param mixURLPath   Path of the mixed audio file (deleted after the merge).
 @param videoPath    Path of the source video.
 @param savePath     Destination path for the merged movie.
 @param successBlock Called on the main queue with the output URL on success.
 */
+ (void)theVideoWithMixMusic:(NSString *)mixURLPath videoPath:(NSString *)videoPath savePath:(NSString *)savePath success:(void(^)(NSURL *recordPath))successBlock
{
    NSURL *audioInputUrl = [NSURL fileURLWithPath:mixURLPath];
    NSURL *videoInputUrl = [NSURL fileURLWithPath:videoPath];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:savePath];
    CMTime insertionStart = kCMTimeZero;
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Video track: copy the full video time range.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    NSError *videoError = nil;
    if (!videoAssetTrack ||
        ![compositionVideoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:insertionStart error:&videoError]) {
        NSLog(@"Failed to insert video track: %@", videoError);
        return;
    }

    // Audio track: clip the mixed audio to the video's duration.
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
    CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    NSError *audioError = nil;
    if (!audioAssetTrack ||
        ![compositionAudioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:insertionStart error:&audioError]) {
        NSLog(@"Failed to insert audio track: %@", audioError);
        return;
    }

    // AVAssetExportSession fails if the output file already exists; the
    // original never removed it, so a second merge to the same path failed.
    if ([[NSFileManager defaultManager] fileExistsAtPath:savePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:savePath error:nil];
    }

    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    assetExport.outputURL = outputFileUrl;
    assetExport.shouldOptimizeForNetworkUse = YES;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            // The intermediate mix file is no longer needed either way.
            if ([[NSFileManager defaultManager] fileExistsAtPath:mixURLPath]) {
                [[NSFileManager defaultManager] removeItemAtPath:mixURLPath error:nil];
            }
            if (assetExport.status == AVAssetExportSessionStatusCompleted) {
                NSLog(@"完成!输出路径==%@",savePath);
                if (successBlock) successBlock(outputFileUrl);
            } else {
                NSLog(@"Final export failed: %@", assetExport.error);
            }
        });
    }];
}
/**
 Creates a new audio track in the composition from the asset at assetURL and
 registers a volume parameter for it in audioMixParams.

 @param assetURL       File URL of the source asset (audio, or a video whose
                       audio is being extracted).
 @param composition    Composition the new track is added to.
 @param start          Start of the time range to copy from the source.
 @param dura           Duration of the time range to copy.
 @param offset         Currently unused — kept for call-site compatibility.
 @param audioMixParams Mutable array collecting AVMutableAudioMixInputParameters.
 @param volume         Track volume, applied from `start`.
 @return The newly created composition track (empty if the source has no audio).
 */
+ (AVMutableCompositionTrack *)setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition*)composition start:(CMTime)start dura:(CMTime)dura offset:(CMTime)offset addAudioParams:(NSMutableArray *)audioMixParams setVolume:(float)volume {
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // firstObject is nil-safe; objectAtIndex:0 crashed on assets with no audio.
    AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (!sourceAudioTrack) {
        return track;
    }
    CMTimeRange copyRange = CMTimeRangeMake(start, dura);

    // Honor the caller's volume. The original hard-coded 0.8 here and ignored
    // the parameter, so the 0.2 requested for the extracted original audio
    // never took effect.
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
    [trackMix setVolume:volume atTime:start];
    [audioMixParams addObject:trackMix];

    // NOTE(review): atTime:kCMTimeInvalid preserved from the original —
    // confirm the intended insertion point (kCMTimeZero looks more likely).
    NSError *error = nil;
    if (![track insertTimeRange:copyRange ofTrack:sourceAudioTrack atTime:kCMTimeInvalid error:&error]) {
        NSLog(@"Failed to insert audio from %@: %@", assetURL, error);
    }
    return track;
}
/**
 Splices each audio file into videoTrack at its scheduled start time, so each
 recording overlays the corresponding segment of the extracted video audio.

 Note: the original declaration was `+ ()setUpAndAddAudioArrayAtPath:…` — a
 missing return type that does not compile; it returns videoTrack, so the
 return type is AVMutableCompositionTrack *.

 @param assetArrayURL   Audio file paths, one per recording.
 @param musicStartTimes Dictionaries; entry[@"schedule"][@"enterTime"] is the
                        start second for the clip at the same index.
 @param composition     Owning composition (not used directly here).
 @param offset          Currently unused — kept for call-site compatibility.
 @param audioMixParams  Mutable array collecting the per-clip mix parameters.
 @param volume          Volume applied to the track for each clip.
 @param videoTrack      Track the clips are inserted into.
 @return videoTrack, with all clips inserted.
 */
+ (AVMutableCompositionTrack *)setUpAndAddAudioArrayAtPath:(NSArray*)assetArrayURL musicStartTime:(NSArray *)musicStartTimes toComposition:(AVMutableComposition*)composition offset:(CMTime)offset addAudioParams:(NSMutableArray *)audioMixParams setVolume:(float)volume videoTrack:(AVMutableCompositionTrack *)videoTrack {
    for (int i = 0; i < assetArrayURL.count; i++) {
        NSString *audioPath = assetArrayURL[i];
        AVURLAsset *audioAsset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:audioPath]];
        CMTimeRange clipRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        // firstObject is nil-safe; objectAtIndex:0 crashed on files with no audio.
        AVAssetTrack *sourceAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        if (!sourceAudioTrack) {
            NSLog(@"No audio track in %@, skipping", audioPath);
            continue;
        }

        // Honor the caller's volume (the original hard-coded 0.8 and ignored
        // the parameter; the one caller passes 0.8, so behavior is unchanged).
        AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:videoTrack];
        [trackMix setVolume:volume atTime:CMTimeMakeWithSeconds(0, audioAsset.duration.timescale)];
        [audioMixParams addObject:trackMix];

        // schedule.enterTime gives the clip's start second within the video.
        NSDictionary *audioDic = musicStartTimes[i];
        NSDictionary *schedule = audioDic[@"schedule"];
        Float64 enterSeconds = [schedule[@"enterTime"] floatValue];

        NSError *error = nil;
        if (![videoTrack insertTimeRange:clipRange ofTrack:sourceAudioTrack atTime:CMTimeMakeWithSeconds(enterSeconds, audioAsset.duration.timescale) error:&error]) {
            NSLog(@"Failed to insert %@: %@", audioPath, error);
        }
    }
    return videoTrack;
}
#pragma mark - Adjusting merged volumes
/**
 Builds an audio mix that sets the video's own audio and the background music
 to the given volumes, both taking effect at `volumeRange`.

 @param videoTrack  The video's own audio track.
 @param videoVolume Volume for the video's own audio.
 @param BGMTrack    The background-music track.
 @param BGMVolume   Volume for the background music.
 @param volumeRange Time at which both volume settings take effect.
 @return The configured audio mix.
 */
+ (AVAudioMix *)buildAudioMixWithVideoTrack:(AVCompositionTrack *)videoTrack VideoVolume:(float)videoVolume BGMTrack:(AVCompositionTrack *)BGMTrack BGMVolume:(float)BGMVolume controlVolumeRange:(CMTime)volumeRange {
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    // Apply the caller's time to BOTH tracks. The original hard-coded
    // CMTimeMakeWithSeconds(5, 600) for the video track and only used
    // volumeRange for the BGM, so the parameter was half-ignored.
    AVMutableAudioMixInputParameters *videoParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:videoTrack];
    [videoParameters setVolume:videoVolume atTime:volumeRange];
    AVMutableAudioMixInputParameters *bgmParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:BGMTrack];
    [bgmParameters setVolume:BGMVolume atTime:volumeRange];
    audioMix.inputParameters = @[videoParameters, bgmParameters];
    return audioMix;
}
@end