Video Recording

Overall, the recording UI is laid out in a xib. At the top of the recording screen there are four buttons: exit, front/back camera switch, flash, and play. These four buttons sit in a single container view, and the view can be repositioned by adjusting its Auto Layout constraints (a short sketch of this follows). Below that is the progress bar, a custom view. At the bottom are two buttons: one starts recording and the other opens the photo album.
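For example, repositioning that top button view is just a matter of changing a constraint's constant. A minimal sketch, assuming a hypothetical topViewTopConstraint outlet wired up in the xib (the name is illustrative, not from the project):

// Hypothetical IBOutlet to the top button view's top constraint (illustrative name)
@property (nonatomic, strong) IBOutlet NSLayoutConstraint * topViewTopConstraint;

- (void)moveTopButtonView{
    // Changing the constant moves the whole button view
    self.topViewTopConstraint.constant = 44.0;
    [UIView animateWithDuration:0.25 animations:^{
        [self.view layoutIfNeeded];
    }];
}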

Final screenshot:

Approach: 1. A class that encapsulates encoding and writing of the recorded video

#import "MLRecordEncoder.h"

@interface MLRecordEncoder ()

@property (nonatomic, strong) AVAssetWriter * writer;  // media writer
@property (nonatomic, strong) AVAssetWriterInput * videoInput;  // video input
@property (nonatomic, strong) AVAssetWriterInput * audioInput;  // audio input
@property (nonatomic, copy) NSString * path;  // output file path

@end

@implementation MLRecordEncoder

- (void)dealloc{
    _writer = nil;
    _videoInput = nil;
    _audioInput = nil;
    _path = nil;
}

// Convenience constructor for MLRecordEncoder
+ (MLRecordEncoder *)encoderForPath:(NSString *)path height:(NSInteger)cy width:(NSInteger)cx channels:(int)ch samples:(Float64)rate{
    return [[MLRecordEncoder alloc] initPath:path height:cy width:cx channels:ch samples:rate];
}

// Designated initializer
- (instancetype)initPath:(NSString *)path height:(NSInteger)cy width:(NSInteger)cx channels:(int)ch samples:(Float64)rate{
    if (self = [super init]) {
        self.path = path;
        // Remove any existing file at the path so the recording starts fresh
        [[NSFileManager defaultManager] removeItemAtPath:self.path error:nil];
        NSURL * url = [NSURL fileURLWithPath:self.path];
        // Create the writer with MP4 as the container type
        _writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeMPEG4 error:nil];
        // Make the file better suited for playback over the network
        _writer.shouldOptimizeForNetworkUse = YES;
        // Set up the video input
        [self initVideoInputHeight:cy width:cx];
        // Only set up audio once a valid sample rate and channel count are available
        if (rate != 0 && ch != 0) {
            // Set up the audio input
            [self initAudioInputChannels:ch samples:rate];
        }
    }
    return self;
}

// Set up the video input
- (void)initVideoInputHeight:(NSInteger)cy width:(NSInteger)cx{
    // Video settings: codec, resolution, etc.
    NSDictionary * settings = @{AVVideoCodecKey: AVVideoCodecH264,
                                AVVideoWidthKey: @(cx),
                                AVVideoHeightKey: @(cy)};
    // Create the video writer input
    _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
    // Tell the input to tailor its processing for a real-time data source
    _videoInput.expectsMediaDataInRealTime = YES;
    // Attach the video input to the writer
    [_writer addInput:_videoInput];
}

// Set up the audio input
- (void)initAudioInputChannels:(int)ch samples:(Float64)rate{
    // Audio settings: format (AAC here), channel count, sample rate, and bit rate
    NSDictionary * settings = @{AVFormatIDKey: @(kAudioFormatMPEG4AAC),
                                AVNumberOfChannelsKey: @(ch),
                                AVSampleRateKey: @(rate),
                                AVEncoderBitRateKey: @(128000)};
    // Create the audio writer input
    _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings];
    // Tell the input to tailor its processing for a real-time data source
    _audioInput.expectsMediaDataInRealTime = YES;
    // Attach the audio input to the writer
    [_writer addInput:_audioInput];
}

// Called when recording finishes
- (void)finishWithCompletionHandler:(void (^)(void))handler{
    [_writer finishWritingWithCompletionHandler:handler];
}

// Write sample data through this method
- (BOOL)encodeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo{
    // Only proceed when the sample data is ready
    if (CMSampleBufferDataIsReady(sampleBuffer)) {
        // Writer status is still Unknown: start the session with the first video frame so video leads
        if (_writer.status == AVAssetWriterStatusUnknown && isVideo) {
            // Use this buffer's presentation time as the session start time
            CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            // Start writing
            [_writer startWriting];
            [_writer startSessionAtSourceTime:startTime];
        }
        // Bail out if the writer has failed
        if (_writer.status == AVAssetWriterStatusFailed) {
            NSLog(@"writer error %@", _writer.error.localizedDescription);
            return NO;
        }
        if (isVideo) {
            // Append only when the video input can accept more data
            if (_videoInput.readyForMoreMediaData) {
                [_videoInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        }else{
            // Append only when the audio input can accept more data
            if (_audioInput.readyForMoreMediaData) {
                [_audioInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        }
    }
    return NO;
}

@end
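Before moving on, here is a minimal sketch of how this encoder is meant to be driven. The output path and the 720x1280 / mono 44.1 kHz parameters are illustrative values; in the real project MLRecordEngine supplies them:

// Create an encoder for a temporary output file
NSString * outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"demo.mp4"];
MLRecordEncoder * encoder = [MLRecordEncoder encoderForPath:outputPath height:1280 width:720 channels:1 samples:44100];
// Feed it every CMSampleBufferRef the capture callbacks deliver:
//   [encoder encodeFrame:sampleBuffer isVideo:YES];  // video buffers
//   [encoder encodeFrame:sampleBuffer isVideo:NO];   // audio buffers
// When recording ends, finalize the file:
[encoder finishWithCompletionHandler:^{
    NSLog(@"File written to %@", outputPath);
}];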

  2. A class that encapsulates video capture and recording

#import "MLRecordEngine.h"
#import "MLRecordEncoder.h"
#import <AVFoundation/AVFoundation.h>
#import <Photos/Photos.h>

@interface MLRecordEngine ()<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate, CAAnimationDelegate>
{
    CMTime _timeOffset; // accumulated time offset from pauses
    CMTime _lastVideo; // presentation time of the last video buffer
    CMTime _lastAudio; // presentation time of the last audio buffer
    
    NSInteger _cx; // video width
    NSInteger _cy; // video height
    int _channels; // audio channel count
    Float64 _samplerate; // audio sample rate
}

@property (nonatomic, strong) MLRecordEncoder * recordEncoder; // recording encoder
@property (nonatomic, strong) AVCaptureSession * recordSession; // capture session
@property (nonatomic, strong) AVCaptureVideoPreviewLayer * previewLayer; // layer that renders the captured video
@property (nonatomic, strong) AVCaptureDeviceInput * backCameraInput; // back camera input
@property (nonatomic, strong) AVCaptureDeviceInput * frontCameraInput; // front camera input
@property (nonatomic, strong) AVCaptureDeviceInput * audioMicInput; // microphone input
@property (nonatomic, strong) dispatch_queue_t captureQueue; // capture callback queue
@property (nonatomic, strong) AVCaptureConnection * audioConnection; // audio connection
@property (nonatomic, strong) AVCaptureConnection * videoConnection; // video connection
@property (nonatomic, strong) AVCaptureVideoDataOutput * videoOutput; // video output
@property (nonatomic, strong) AVCaptureAudioDataOutput * audioOutput; // audio output
@property (atomic, assign) BOOL isCapturing; // currently recording
@property (atomic, assign) BOOL isPaused; // currently paused
@property (atomic, assign) BOOL discont; // was recording interrupted (paused)
@property (atomic, assign) CMTime startTime; // time recording started
@property (atomic, assign) CGFloat currentRecordTime; // current recording duration

@end

@implementation MLRecordEngine

- (void)dealloc{
    [_recordSession stopRunning];
    
    _captureQueue = nil;
    _recordSession = nil;
    _previewLayer = nil;
    _backCameraInput = nil;
    _frontCameraInput = nil;
    _audioOutput = nil;
    _videoOutput = nil;
    _audioConnection = nil;
    _videoConnection = nil;
    _recordEncoder = nil;
}

- (instancetype)init{
    if (self = [super init]) {
        self.maxRecordTime = 60.0f;
    }
    return self;
}

#pragma mark - Public methods
// Bring the capture session up
- (void)startUp{
    // Zero with a valid timescale (a timescale of 0 would make the CMTime invalid)
    self.startTime = CMTimeMake(0, 1);
    self.isCapturing = NO;
    self.isPaused = NO;
    self.discont = NO;
    [self.recordSession startRunning];
}

// Tear the capture session down
- (void)shutdown{
    _startTime = CMTimeMake(0, 1);
    if (_recordSession) {
        [_recordSession stopRunning];
    }
    [_recordEncoder finishWithCompletionHandler:^{
        NSLog(@"Recording finished");
    }];
}

// Start recording
- (void)startCapture{
    @synchronized(self) {
        if (!self.isCapturing) {
            NSLog(@"Start recording");
            self.recordEncoder = nil;
            self.isPaused = NO;
            self.discont = NO;
            _timeOffset = CMTimeMake(0, 1);
            self.isCapturing = YES;
        }
    }
}

// Pause recording
- (void)pauseCapture{
    @synchronized(self) {
        if (self.isCapturing) {
            self.isPaused = YES;
            self.discont = YES;
        }
    }
}

// Resume recording
- (void)resumeCapture{
    @synchronized(self) {
        if (self.isPaused) {
            self.isPaused = NO;
        }
    }
}

// Stop recording
- (void)stopCaptureHandler:(void (^)(UIImage *))handler{
    @synchronized(self) {
        if (self.isCapturing) {
            NSString * path = self.recordEncoder.path;
            NSURL * url = [NSURL fileURLWithPath:path];
            self.isCapturing = NO;
            dispatch_async(_captureQueue, ^{
                [self.recordEncoder finishWithCompletionHandler:^{
                    self.recordEncoder = nil;
                    // Save the finished file to the photo library
                    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:url];
                    } completionHandler:^(BOOL success, NSError * _Nullable error) {
                        if (success) {
                            NSLog(@"Saved to the photo library");
                        } else {
                            NSLog(@"Failed to save to the photo library: %@", error);
                        }
                    }];
                    [self movieToImageHandler:handler];
                }];
            });
        }
    }
}

// Grab the first frame of the video as an image
- (void)movieToImageHandler:(void (^) (UIImage * movieImage))handler{
    NSURL * url = [NSURL fileURLWithPath:self.videoPath];
    AVURLAsset * asset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVAssetImageGenerator * generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;
    CMTime thumbTime = CMTimeMakeWithSeconds(0, 60);
    generator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    AVAssetImageGeneratorCompletionHandler generatorHandler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * error){
        if (result == AVAssetImageGeneratorSucceeded) {
            UIImage * thumbImg = [UIImage imageWithCGImage:im];
            if (handler) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    handler(thumbImg);
                });
            }
        }
    };
    [generator generateCGImagesAsynchronouslyForTimes:[NSArray arrayWithObject:[NSValue valueWithCMTime:thumbTime]] completionHandler:generatorHandler];
}

#pragma mark - Getters / setters
// Capture session
- (AVCaptureSession *)recordSession{
    if (_recordSession == nil) {
        _recordSession = [[AVCaptureSession alloc] init];
        // Add the back camera input
        if ([_recordSession canAddInput:self.backCameraInput]) {
            [_recordSession addInput:self.backCameraInput];
        }
        // Add the microphone input
        if ([_recordSession canAddInput:self.audioMicInput]) {
            [_recordSession addInput:self.audioMicInput];
        }
        // Add the video output
        if ([_recordSession canAddOutput:self.videoOutput]) {
            [_recordSession addOutput:self.videoOutput];
            // Record the target resolution, passed to the encoder later
            _cx = 720;
            _cy = 1280;
        }
        // Add the audio output
        if ([_recordSession canAddOutput:self.audioOutput]) {
            [_recordSession addOutput:self.audioOutput];
        }
        // Set the recording orientation to portrait
        self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }
    return _recordSession;
}

// Back camera input
- (AVCaptureDeviceInput *)backCameraInput{
    if (_backCameraInput == nil) {
        NSError * error;
        _backCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:&error];
        if (error) {
            NSLog(@"Failed to get the back camera");
        }
    }
    return _backCameraInput;
}

// Front camera input
- (AVCaptureDeviceInput *)frontCameraInput{
    if (_frontCameraInput == nil) {
        NSError * error;
        _frontCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontCamera] error:&error];
        if (error) {
            NSLog(@"Failed to get the front camera");
        }
    }
    return _frontCameraInput;
}

// Microphone input
- (AVCaptureDeviceInput *)audioMicInput{
    if (_audioMicInput == nil) {
        AVCaptureDevice * mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        NSError * error;
        _audioMicInput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:&error];
        if (error) {
            NSLog(@"Failed to get the microphone");
        }
    }
    return _audioMicInput;
}

// Video data output
- (AVCaptureVideoDataOutput *)videoOutput{
    if (_videoOutput == nil) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        [_videoOutput setSampleBufferDelegate:self queue:self.captureQueue];
        // Deliver frames in the bi-planar Y'CbCr 4:2:0 (video range) pixel format
        _videoOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
    }
    return _videoOutput;
}

// Audio data output
- (AVCaptureAudioDataOutput *)audioOutput{
    if (_audioOutput == nil) {
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [_audioOutput setSampleBufferDelegate:self queue:self.captureQueue];
    }
    return _audioOutput;
}

// Video connection; fetched fresh each time because the connection changes when the camera input is swapped
- (AVCaptureConnection *)videoConnection{
    _videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    return _videoConnection;
}

// Audio connection
- (AVCaptureConnection *)audioConnection{
    if (_audioConnection == nil) {
        _audioConnection = [self.audioOutput connectionWithMediaType:AVMediaTypeAudio];
    }
    return _audioConnection;
}

// Preview layer that renders the captured video
- (AVCaptureVideoPreviewLayer *)previewLayer{
    if (_previewLayer == nil) {
        // Initialize with the capture session
        AVCaptureVideoPreviewLayer * preview = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.recordSession];
        // Fill the screen, cropping the video as needed
        preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _previewLayer = preview;
    }
    return _previewLayer;
}

// Serial queue for the capture callbacks
- (dispatch_queue_t)captureQueue{
    if (_captureQueue == nil) {
        _captureQueue = dispatch_queue_create("cn.qiuyouqun.im.wclrecordengine.capture", DISPATCH_QUEUE_SERIAL);
    }
    return _captureQueue;
}

#pragma mark - Camera-switch animation
- (void)changeCameraAnimation{
    CATransition * changeAnimation = [CATransition animation];
    changeAnimation.delegate = self;
    changeAnimation.duration = 0.45;
    changeAnimation.type = @"oglFlip";
    changeAnimation.subtype = kCATransitionFromRight;
    changeAnimation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
    [self.previewLayer addAnimation:changeAnimation forKey:@"changeAnimation"];
}

- (void)animationDidStart:(CAAnimation *)anim{
    self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
    [self.recordSession startRunning];
}

#pragma mark - Convert a .mov file to .mp4
- (void)changeMovToMp4:(NSURL *)mediaURL dataBlock:(void (^)(UIImage *))handler{
    AVAsset * video = [AVAsset assetWithURL:mediaURL];
    AVAssetExportSession * exportSession = [AVAssetExportSession exportSessionWithAsset:video presetName:AVAssetExportPreset1280x720];
    exportSession.shouldOptimizeForNetworkUse = YES;
    exportSession.outputFileType = AVFileTypeMPEG4;
    NSString * basePath = [self getVideoCachePath];
    
    self.videoPath = [basePath stringByAppendingPathComponent:[self getUploadFile_type:@"video" fileType:@"mp4"]];
    exportSession.outputURL = [NSURL fileURLWithPath:self.videoPath];
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        [self movieToImageHandler:handler];
    }];
}
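// For context: this conversion would typically be driven from a
// UIImagePickerController delegate after the user picks a video from the
// album. A hedged sketch (the self.engine property and the delegate wiring
// are assumptions, not shown in this post):
//
//   - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info{
//       NSURL * movURL = info[UIImagePickerControllerMediaURL]; // the picked .mov file
//       [self.engine changeMovToMp4:movURL dataBlock:^(UIImage * firstFrame) {
//           // self.engine.videoPath now points at the exported .mp4,
//           // and firstFrame is its first video frame
//       }];
//       [picker dismissViewControllerAnimated:YES completion:nil];
//   }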

#pragma mark - Camera helpers
// Front camera device
- (AVCaptureDevice *)frontCamera{
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}

// Back camera device
- (AVCaptureDevice *)backCamera{
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}

// Switch between the front and back cameras (the session is restarted in animationDidStart: once the flip animation begins)
- (void)changeCameraInputDeviceisFront:(BOOL)isFront{
    if (isFront) {
        [self.recordSession stopRunning];
        [self.recordSession removeInput:self.backCameraInput];
        if ([self.recordSession canAddInput:self.frontCameraInput]) {
            [self changeCameraAnimation];
            [self.recordSession addInput:self.frontCameraInput];
        }
    }else{
        [self.recordSession stopRunning];
        [self.recordSession removeInput:self.frontCameraInput];
        if ([self.recordSession canAddInput:self.backCameraInput]) {
            [self changeCameraAnimation];
            [self.recordSession addInput:self.backCameraInput];
        }
    }
}

// Return the camera device at the given position
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{
    // All default devices capable of capturing video
    NSArray * devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    // Find the device at the requested position
    for (AVCaptureDevice * device in devices) {
        if ([device position] == position) {
            return device;
        }
    }
    return nil;
}

// Turn the torch on
- (void)openFlashLight{
    AVCaptureDevice * backCamera = [self backCamera];
    if (backCamera.torchMode == AVCaptureTorchModeOff) {
        [backCamera lockForConfiguration:nil];
        backCamera.torchMode = AVCaptureTorchModeOn;
        backCamera.flashMode = AVCaptureFlashModeOn;
        [backCamera unlockForConfiguration];
    }
}

// Turn the torch off
- (void)closeFlashLight{
    AVCaptureDevice * backCamera = [self backCamera];
    if (backCamera.torchMode == AVCaptureTorchModeOn) {
        [backCamera lockForConfiguration:nil];
        backCamera.torchMode = AVCaptureTorchModeOff;
        backCamera.flashMode = AVCaptureFlashModeOff;
        [backCamera unlockForConfiguration];
    }
}

// Directory where recorded videos are cached
- (NSString *)getVideoCachePath{
    NSString * videoCache = [NSTemporaryDirectory() stringByAppendingPathComponent:@"videos"];
    BOOL isDir = NO;
    NSFileManager * fileManager = [NSFileManager defaultManager];
    BOOL existed = [fileManager fileExistsAtPath:videoCache isDirectory:&isDir];
    if (!(existed && isDir)) {
        [fileManager createDirectoryAtPath:videoCache withIntermediateDirectories:YES attributes:nil error:nil];
    }
    return videoCache;
}

// Build a file name like "video_HHmmss.mp4" from the current time
- (NSString *)getUploadFile_type:(NSString *)type fileType:(NSString *)fileType{
    NSDateFormatter * formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"HHmmss";
    NSString * timeStr = [formatter stringFromDate:[NSDate date]];
    return [NSString stringWithFormat:@"%@_%@.%@", type, timeStr, fileType];
}

#pragma mark - Writing data (capture callbacks)
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    BOOL isVideo = YES;
    @synchronized(self) {
        if (!self.isCapturing || self.isPaused) {
            return;
        }
        if (captureOutput != self.videoOutput) {
            isVideo = NO;
        }
        // Create the encoder lazily: wait for the first audio buffer so both audio and video parameters are known
        if ((self.recordEncoder == nil) && !isVideo) {
            CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
            [self setAudioFormat:fmt];
            NSString * videoName = [self getUploadFile_type:@"video" fileType:@"mp4"];
            self.videoPath = [[self getVideoCachePath] stringByAppendingPathComponent:videoName];
            self.recordEncoder = [MLRecordEncoder encoderForPath:self.videoPath height:_cy width:_cx channels:_channels samples:_samplerate];
        }
        // Handle resuming after a pause
        if (self.discont) {
            if (isVideo) {
                return;
            }
            self.discont = NO;
            // Compute how long the pause lasted
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            CMTime last = isVideo ? _lastVideo : _lastAudio;
            if (last.flags & kCMTimeFlags_Valid) {
                if (_timeOffset.flags & kCMTimeFlags_Valid) {
                    pts = CMTimeSubtract(pts, _timeOffset);
                }
                CMTime offset = CMTimeSubtract(pts, last);
                if (_timeOffset.value == 0) {
                    _timeOffset = offset;
                }else{
                    _timeOffset = CMTimeAdd(_timeOffset, offset);
                }
            }
            _lastVideo.flags = 0;
            _lastAudio.flags = 0;
        }
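        // Worked example of the offset bookkeeping above (numbers assumed):
        // if the last audio PTS before pausing was 5.0 s and the first PTS
        // after resuming is 8.0 s, the pause contributed offset = 3.0 s to
        // _timeOffset. Every later buffer then has 3.0 s subtracted from its
        // timestamps (below), so the written file contains no gap where the
        // pause happened.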
        // Retain the sample buffer so it stays valid while we adjust or append it
        CFRetain(sampleBuffer);
        if (_timeOffset.value > 0) {
            CFRelease(sampleBuffer);
            // Replace the buffer with a copy whose timestamps are shifted back by the accumulated offset
            sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
        }
        // Remember this buffer's end time; it becomes the "last" time for the next pause calculation
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
        if (dur.value > 0) {
            pts = CMTimeAdd(pts, dur);
        }
        if (isVideo) {
            _lastVideo = pts;
        }else{
            _lastAudio = pts;
        }
    }
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (self.startTime.value == 0) {
        self.startTime = pts;
    }
    CMTime sub = CMTimeSubtract(pts, self.startTime);
    self.currentRecordTime = CMTimeGetSeconds(sub);
    if (self.currentRecordTime > self.maxRecordTime) {
        if (self.currentRecordTime - self.maxRecordTime < 0.1) {
            if ([self.delegate respondsToSelector:@selector(recordProgress:)]) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self.delegate recordProgress:self.currentRecordTime / self.maxRecordTime];
                });
            }
        }
        return;
    }
    if ([self.delegate respondsToSelector:@selector(recordProgress:)]) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.delegate recordProgress:self.currentRecordTime / self.maxRecordTime];
        });
    }
    // Hand the buffer to the encoder
    [self.recordEncoder encodeFrame:sampleBuffer isVideo:isVideo];
    CFRelease(sampleBuffer);
}

// Read the audio format (sample rate and channel count) from the format description
- (void)setAudioFormat:(CMFormatDescriptionRef)fmt{
    const AudioStreamBasicDescription * asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
    _samplerate = asbd->mSampleRate;
    _channels = asbd->mChannelsPerFrame;
}

// Return a copy of the sample buffer with its timing shifted earlier by offset
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset{
    CMItemCount count;
    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
    CMSampleTimingInfo * pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
    for (CMItemCount i = 0; i < count; i++) {
        pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
        pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
    }
    CMSampleBufferRef sout;
    CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
    free(pInfo);
    return sout;
}

@end
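To tie the engine to the xib-based UI described at the top of the post, the recording view controller might look roughly like the sketch below. The controller class, the outlet actions, the MLRecordEngineDelegate protocol name, and the assumption that previewLayer, delegate, and the public methods are exposed in MLRecordEngine.h are all illustrative; the post only shows the recordProgress: callback being invoked:

// Hypothetical recording view controller, for illustration only
@interface MLRecordViewController : UIViewController <MLRecordEngineDelegate>
@property (nonatomic, strong) MLRecordEngine * engine;
@end

@implementation MLRecordViewController

- (void)viewDidLoad{
    [super viewDidLoad];
    self.engine = [[MLRecordEngine alloc] init];
    self.engine.delegate = self;
    // Put the preview layer behind the xib's buttons and progress bar
    self.engine.previewLayer.frame = self.view.bounds;
    [self.view.layer insertSublayer:self.engine.previewLayer atIndex:0];
    [self.engine startUp];
}

- (IBAction)recordButtonPressed{
    [self.engine startCapture];
}

- (IBAction)recordButtonReleased{
    [self.engine stopCaptureHandler:^(UIImage * firstFrame) {
        // Show firstFrame as a thumbnail, upload the file, etc.
    }];
}

// Delegate callback the engine invokes with progress in 0.0 ... 1.0
- (void)recordProgress:(CGFloat)progress{
    // Drive the progress bar; see MLRecordProgressView below
}

@end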

  3. The progress bar view

#import "MLRecordProgressView.h"

@implementation MLRecordProgressView

- (void)setProgress:(CGFloat)progress{
    _progress = progress;
    [self setNeedsDisplay];
}

- (void)setProgressBgColor:(UIColor *)progressBgColor{
    _progressBgColor = progressBgColor;
    [self setNeedsDisplay];
}

- (void)setLoadProgressColor:(UIColor *)loadProgressColor{
    _loadProgressColor = loadProgressColor;
    [self setNeedsDisplay];
}

- (void)setLoadProgress:(CGFloat)loadProgress{
    _loadProgress = loadProgress;
    [self setNeedsDisplay];
}

- (void)setProgressColor:(UIColor *)progressColor{
    _progressColor = progressColor;
    [self setNeedsDisplay];
}

- (void)drawRect:(CGRect)rect{
    CGContextRef context = UIGraphicsGetCurrentContext();
    // Background track, drawn semi-transparent
    CGContextAddRect(context, CGRectMake(0, 0, rect.size.width, rect.size.height));
    [self.progressBgColor set];
    CGContextSetAlpha(context, 0.5);
    CGContextDrawPath(context, kCGPathFill);
    // Loaded portion
    CGContextAddRect(context, CGRectMake(0, 0, rect.size.width * self.loadProgress, rect.size.height));
    [self.loadProgressColor set];
    CGContextSetAlpha(context, 1);
    CGContextDrawPath(context, kCGPathFill);
    // Recorded portion
    CGContextAddRect(context, CGRectMake(0, 0, rect.size.width * self.progress, rect.size.height));
    [self.progressColor set];
    CGContextSetAlpha(context, 1);
    CGContextDrawPath(context, kCGPathFill);
}

@end
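Hooking the bar up to the engine is then a one-liner in the delegate callback from the controller sketched above (the progressView outlet is an assumption):

// In the hypothetical recording view controller
- (void)recordProgress:(CGFloat)progress{
    self.progressView.progress = progress; // 0.0 ... 1.0; the setter triggers setNeedsDisplay
}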

Source code: https://github.com/ChangYulong/MLRecordVideo

  
