logo头像
Snippet 博客主题

iOS视频开发(二)

本文于2005天之前发表,文中内容可能已经过时。

此文是iOS视频开发的第二篇,相比第一篇来说视频和音频的可定制化更加多样化,本文总结的只是一些简单的知识,通过此文可以对音视频的采集和编辑有大致的了解。

通过AVAssetWriter和AVCaptureSession录制视频

此文系列的第一篇已经对AVCaptureSession录制视频的基本过程有了大致的介绍,
AVCaptureSession和AVAssetWriter的基本步骤类似,但是通过AVAssetWriter可以实现自由度更高的音视频编辑功能,比如设置视频和音频的一些属性、给视频加滤镜、音频变声等功能。

基本步骤
  1. 初始化一个AVCaptureSession
  2. 获取音频device和视频device
  3. 初始化videoInput&audioInput
  4. 初始化videoDataOutPut以及audioDataOutPut以及分别对应的queue
  5. 获取videoConnection和audioConnection
  6. 将Input和output加入AVCaptureSession
  7. 初始化AVAssetWriter

1~3请参考前文。

四、建立videoDataOutput和audioDataOutput
//audioOutput
// Lazily creates the audio capture output and wires up its sample-buffer delegate.
- (AVCaptureAudioDataOutput *)audioDataOutput{
    if (_audioDataOutput == nil) {
        AVCaptureAudioDataOutput *output = [[AVCaptureAudioDataOutput alloc] init];
        // Deliver audio sample buffers to self on the dedicated audio queue.
        [output setSampleBufferDelegate:self queue:self.audioDataOutputQueue];
        _audioDataOutput = output;
    }
    return _audioDataOutput;
}
//videoOutput
// Lazily creates the video capture output and wires up its sample-buffer delegate.
- (AVCaptureVideoDataOutput *)videoDataOutput{
    if (_videoDataOutput == nil) {
        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        // nil videoSettings: frames are delivered in the device's native format.
        output.videoSettings = nil;
        // Keep late frames rather than dropping them (recording favors completeness).
        output.alwaysDiscardsLateVideoFrames = NO;
        // Deliver video sample buffers to self on the dedicated video queue.
        [output setSampleBufferDelegate:self queue:self.videoDataOutputQueue];
        _videoDataOutput = output;
    }
    return _videoDataOutput;
}
五、获取videoConnection和audioConnection
// Grab the connections AFTER the outputs have been added to the session;
// connectionWithMediaType: returns nil for an output that is not attached yet.
_videoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
_audioConnection = [self.audioDataOutput connectionWithMediaType:AVMediaTypeAudio];
六、将Input和output加入AVCaptureSession
 _session = [[AVCaptureSession alloc]init];
        // Set the capture quality preset (guarded: not every preset is
        // supported on every device).
        if ([_session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
            [_session setSessionPreset:AVCaptureSessionPresetHigh];
        }
        // Attach the video and audio device inputs; canAddInput: must be
        // checked first, otherwise addInput: raises an exception.
        if ([_session canAddInput:self.videoInput]) {
            [_session addInput:self.videoInput];
        }
        if ([_session canAddInput:self.audioInput]) {
            [_session addInput:self.audioInput];
        }
        
        
        // Attach the data outputs, then fetch each output's connection —
        // connectionWithMediaType: only returns a value once the output has
        // been added to the session.
        if ([_session canAddOutput:self.videoDataOutput]) {
            [_session addOutput:self.videoDataOutput];
        }
        _videoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
        if ([_session canAddOutput:self.audioDataOutput]) {
            [_session addOutput:self.audioDataOutput];
        }
        _audioConnection = [self.audioDataOutput connectionWithMediaType:AVMediaTypeAudio];
七、AVAssetWriter
// One AVAssetWriter per output file URL (the URL must not already exist).
 _assetWriter = [[AVAssetWriter alloc]initWithURL:filePathUrl fileType:AVFileTypeQuickTimeMovie error:&error];
    // Check the returned object, not the error pointer: Cocoa may leave the
    // error non-nil even on success. A nil writer is the real failure signal.
    if (_assetWriter == nil) {
        NSLog(@"%@",error.localizedDescription);
        return;
    }
    
    // NOTE(review): writer-input setup runs on a background global queue while
    // sample buffers arrive on writingQueue — confirm startWriting is
    // guaranteed to happen before the first append.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
        @autoreleasepool {
            // Video writer input: real-time capture requires
            // expectsMediaDataInRealTime = YES so the writer never blocks.
            if ([_assetWriter canApplyOutputSettings:self.videoCompressionSettings forMediaType:AVMediaTypeVideo]) {
                self.videoAssetWriterInput = [[AVAssetWriterInput alloc]initWithMediaType:AVMediaTypeVideo outputSettings:self.videoCompressionSettings];
                self.videoAssetWriterInput.expectsMediaDataInRealTime = YES;
                if([_assetWriter canAddInput:self.videoAssetWriterInput]){
                    [_assetWriter addInput:self.videoAssetWriterInput];
                }
            }
            
            // Audio writer input: encode to AAC; the source format hint lets
            // the writer configure itself from the captured stream's format.
            NSDictionary *audioSettings = @{ AVFormatIDKey : @(kAudioFormatMPEG4AAC) };
            if ([_assetWriter canApplyOutputSettings:audioSettings forMediaType:AVMediaTypeAudio]) {
                self.audioAssetWriterInput = [[AVAssetWriterInput alloc]initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings sourceFormatHint:self.outputAudioFormatDescription];
                self.audioAssetWriterInput.expectsMediaDataInRealTime = YES;
                if ([_assetWriter canAddInput:self.audioAssetWriterInput]) {
                    [_assetWriter addInput:self.audioAssetWriterInput];
                }
            }
            // Inputs can no longer be added once writing has started.
            [_assetWriter startWriting];
            
        }
    });
代理中写入CMSampleBufferRef
// AVCapture*DataOutput delegate: appends incoming sample buffers to the asset
// writer. Invoked on the video/audio queues configured on the data outputs.
//
// Fixes vs. original: the buffer was CFRetain'd unconditionally at the top,
// but never released on the first-video-buffer path or for an unmatched
// connection, leaking one CMSampleBufferRef per occurrence. The retain now
// happens only when a write is actually dispatched, and every async path
// balances it with CFRelease. The magic status value 1 is replaced with
// AVAssetWriterStatusWriting.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    // Cache the current stream format; this must be refreshed whenever the
    // capture settings change.
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
    if (connection == self.videoConnection) {
        if (self.outputVideoFormatDescription == nil) {
            // Don't write the very first video buffer — it only identifies the
            // stream's format (~33ms frame interval at 30fps).
            self.outputVideoFormatDescription = formatDescription;
            return;
        }
        self.outputVideoFormatDescription = formatDescription;
        // Keep the buffer alive across the async hop; every path in the block
        // below releases it exactly once.
        CFRetain(sampleBuffer);
        // Write asynchronously so slow storage never stalls the capture queue.
        dispatch_async(self.writingQueue, ^{
            @autoreleasepool {
                @synchronized(self){
                    if (_assetWriter.status != AVAssetWriterStatusWriting) {
                        CFRelease(sampleBuffer);
                        return;
                    }
                    if (!_sessionStart) {
                        // Anchor the writer's timeline to the first written frame.
                        [_assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
                        _sessionStart = YES;
                    }
                    if (self.videoAssetWriterInput.readyForMoreMediaData) {
                        if (![self.videoAssetWriterInput appendSampleBuffer:sampleBuffer]) {
                            NSLog(@"%@ ---video", _assetWriter.error.localizedDescription);
                        }
                    }
                    CFRelease(sampleBuffer);
                }
            }
        });
    }else if(connection == self.audioConnection){
        self.outputAudioFormatDescription = formatDescription;
        CFRetain(sampleBuffer);
        dispatch_async(self.writingQueue, ^{
            @autoreleasepool {
                @synchronized(self){
                    if (_assetWriter.status != AVAssetWriterStatusWriting) {
                        CFRelease(sampleBuffer);
                        return;
                    }
                    if (self.audioAssetWriterInput.readyForMoreMediaData) {
                        if (![self.audioAssetWriterInput appendSampleBuffer:sampleBuffer]) {
                            NSLog(@"%@",_assetWriter.error.localizedDescription);
                        }
                    }
                    CFRelease(sampleBuffer);
                }
            }
        });
    }
    // Buffers from any other connection fall through: never retained, never written.
}

大致就是这么一个流程,但是其中涉及的东西特别多,还不能一一的列举出来,接下来的时间里再慢慢研究这方面的东西吧。

上一篇