美文网首页音视频
iOS 自定义AVAssetExportSession

iOS 自定义AVAssetExportSession

作者: 李田迎 | 来源:发表于2019-07-19 10:17 被阅读0次

    简介:

    使用AVAssetWriter AVAssetReader 等相关类,实现了AVAssetExportSession相关功能,最初的目的是为定位AVAssetExportSession导出过程出现卡住,不回调block的bug.

    说明:

    1.本类编写借鉴了SDAVAssetExportSession

    2.本类目前不支持实时进度功能

    3.copyNextSampleBuffer 函数不返回,是导致export过程卡住的原因, 需要修改videoComposition的CMTimeScale来避免.尽量不要出现让reader读取不是整数帧时间(CMTime)的情况发生.

    例如:CMTime start = CMTimeMakeWithSeconds(CMTimeGetSeconds(_clipRange.start), 30);

    简易流程图

    image.png
    
    //
    
    //  AHAVAssetExportSession.h
    
    //  demo
    
    //
    
    //  Created by 李田迎 on 2019/7/18.
    
    //  Copyright © 2019. All rights reserved.
    
    //  自定义实现 多媒体数据导出到本地功能
    
    #import <Foundation/Foundation.h>

    #import <AVFoundation/AVFoundation.h>

    NS_ASSUME_NONNULL_BEGIN

    /// Custom replacement for AVAssetExportSession built on AVAssetReader /
    /// AVAssetWriter. Written to work around an AVAssetExportSession bug where
    /// the export stalls and the completion block is never invoked.
    /// Based on SDAVAssetExportSession; real-time progress is not supported.
    @interface AHAVAssetExportSession : NSObject

    // Inputs

    @property (nonatomic, copy) NSString *outputFileType;            //!< Output container type (e.g. AVFileTypeMPEG4)

    @property (nonatomic, copy) NSURL *outputURL;                    //!< Destination file URL

    @property (nonatomic, copy) NSDictionary *videoSettings;         //!< Video encoder settings; a default is provided

    @property (nonatomic, copy) NSDictionary *audioSettings;         //!< Audio encoder settings; a default is provided

    @property (nonatomic, copy, nullable) NSArray *metadata;         //!< Metadata to embed in the output

    @property (nonatomic, copy, nullable) AVVideoComposition *videoComposition; //!< Video composition; a pass-through default is built lazily

    @property (nonatomic, copy, nullable) AVAudioMix *audioMix;      //!< Audio mix to apply while reading

    @property (nonatomic, assign) CGSize targetSize;                 //!< Output video size

    @property (nonatomic, assign) BOOL shouldOptimizeForNetworkUse;  //!< Forwarded to AVAssetWriter (moves moov atom for streaming)

    // Outputs

    @property (nonatomic, assign, readonly) AVAssetExportSessionStatus status;  //!< Export status (derived from the writer)

    @property (nonatomic, strong, readonly, nullable) NSError *error;           //!< Export error, if any

    @property (nonatomic, assign, readonly) CGFloat progress;                   //!< Export progress, 0-1

    /**
     Designated initializer.

     @param asset The source media asset to export.

     @return A new exporter instance.
     */
    - (instancetype)initWithAsset:(AVAsset *)asset;

    /**
     Starts the export asynchronously.

     @param handler Invoked once when the export completes, fails, or is cancelled.
     */
    - (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))handler;

    /**
     Cancels an in-flight export and tears down the reader/writer state.
     */
    - (void)cancelExport;

    @end

    NS_ASSUME_NONNULL_END
    
    
    
    //
    
    //  AHAVAssetExportSession.m
    
    //  demo
    
    //
    
    //  Created by 李田迎 on 2019/7/18.
    
    //  Copyright © 2019. All rights reserved.
    
    // 
    
    #import "AHAVAssetExportSession.h"
    
    #import "AHVideoRecordDef.h"
    
    #import "AHVideoRecordCustomConfig.h"
    
    @interface AHAVAssetExportSession ()

    @property (nonatomic, strong) AVAssetReader *assetReader;        //!< Reads samples out of the source asset

    //!< Video composition reader output

    @property (nonatomic, strong) AVAssetReaderVideoCompositionOutput *videoOutput;

    //!< Audio mix reader output

    @property (nonatomic, strong) AVAssetReaderAudioMixOutput *audioOutput;

    @property (nonatomic, strong) AVAssetWriter *assetWriter;        //!< Writes samples into the destination file

    @property (nonatomic, strong) AVAssetWriterInput *videoInput;    //!< Video writer input (append target)

    @property (nonatomic, strong) AVAssetWriterInput *audioInput;    //!< Audio writer input

    @property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoPixelBufferAdaptor;

    @property (nonatomic, strong) dispatch_queue_t inputQueue;       //!< Serial queue all sample pumping runs on

    @property (nonatomic, strong) AVAsset *asset;                    //!< Source asset

    // copy (not strong) is the conventional attribute for block properties.
    @property (nonatomic, copy) void (^completionHandler)(void);     //!< Completion callback

    @property (nonatomic, assign, readwrite) CGFloat progress;       //!< Export progress, 0-1

    @end
    
    @implementation AHAVAssetExportSession {

        NSError *_error;  // Explicitly recorded error; -error falls back to writer/reader errors

    }
    
    #pragma mark LifeCycle Method
    
    - (instancetype)initWithAsset:(AVAsset *)asset {

        if (self = [super init]) {

            self.asset = asset;

            // Default output size comes from the project recording config.
            // NOTE(review): mixed constant prefixes (KVideoPixelsWidth vs
            // kVideoPixelsHeight) — confirm both are spelled this way in
            // AHVideoRecordDef.h / AHVideoRecordCustomConfig.h.
            _targetSize = CGSizeMake(KVideoPixelsWidth, kVideoPixelsHeight);

        }

        return self;

    }
    
    - (void)dealloc {

        // Nothing to clean up manually: ARC releases all properties, and the
        // reader/writer are torn down explicitly in -reset / -cancelExport.

    }
    
    #pragma mark Public Method
    
    - (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))handler {

        // NOTE(review): -cancelExport tears down asynchronously on inputQueue and
        // nils completionHandler there, so starting a new export immediately after
        // a previous one may race with that teardown — confirm the intended usage
        // is one export per instance.
        [self cancelExport];

        self.completionHandler = handler;

        // 1. Attach the video/audio outputs to the reader.
        if ([self.assetReader canAddOutput:self.videoOutput]) {
            [self.assetReader addOutput:self.videoOutput];
        }
        if ([self.assetReader canAddOutput:self.audioOutput]) {
            [self.assetReader addOutput:self.audioOutput];
        }

        // 2. Attach the writer inputs.
        if ([self.assetWriter canAddInput:self.videoInput]) {
            [self.assetWriter addInput:self.videoInput];
        }
        if ([self.assetWriter canAddInput:self.audioInput]) {
            [self.assetWriter addInput:self.audioInput];
        }

        // 3. Start reading/writing; the session timeline begins at zero.
        [self.assetWriter startWriting];
        [self.assetReader startReading];
        [self.assetWriter startSessionAtSourceTime:kCMTimeZero];

        __block BOOL videoCompleted = NO;
        __block BOOL audioCompleted = NO;
        __weak typeof(self) wself = self;

        // 4. Pump video samples. When this track is drained (or errors), mark it
        //    done and finish only once the audio track is done too.
        [self.videoInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^{
            if (![wself encodeReadySamplesFromOutput:wself.videoOutput toInput:wself.videoInput]) {
                @synchronized (wself) {
                    videoCompleted = YES;
                    if (audioCompleted) {
                        [wself finish];
                    }
                }
            }
        }];

        // 5. Pump audio samples, mirroring the video path above.
        [self.audioInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^{
            if (![wself encodeReadySamplesFromOutput:wself.audioOutput toInput:wself.audioInput]) {
                @synchronized (wself) {
                    audioCompleted = YES;
                    if (videoCompleted) {
                        [wself finish];
                    }
                }
            }
        }];

    }
    
    - (void)cancelExport {

        // No input queue means no export was ever started — nothing to cancel.
        if (!_inputQueue) {
            return;
        }

        // Cancel on the input queue so we never race the sample-pumping blocks.
        dispatch_async(self.inputQueue, ^{
            [self.assetWriter cancelWriting];
            [self.assetReader cancelReading];
            [self complete];
            [self reset];
        });

    }
    
    #pragma mark Private Method
    
    - (BOOL)encodeReadySamplesFromOutput:(AVAssetReaderOutput *)output toInput:(AVAssetWriterInput *)input {

        // Copies samples from `output` to `input` while the input can accept more.
        // Returns YES if more samples may follow later, NO when this track is done
        // (fully drained, failed, or cancelled).
        while (input.isReadyForMoreMediaData) {

            CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];

            if (sampleBuffer) {

                BOOL handled = NO;
                BOOL error = NO;

                // Stop appending as soon as either side leaves its active state
                // (e.g. after cancellation or a write failure).
                if (self.assetReader.status != AVAssetReaderStatusReading ||
                    self.assetWriter.status != AVAssetWriterStatusWriting) {
                    handled = YES;
                    error = YES;
                }

                if (!handled && ![input appendSampleBuffer:sampleBuffer]) {
                    error = YES;
                }

                // copyNextSampleBuffer follows the CF Create/Copy rule — we own it.
                CFRelease(sampleBuffer);

                if (error) {
                    return NO;
                }

            } else {

                // A nil buffer means the track is fully read (or the reader
                // stopped); tell the writer input no more data is coming.
                [input markAsFinished];
                return NO;

            }

        }

        return YES;

    }
    
    - (void)reset {

        // Drops all reader/writer state so the instance can be reused.
        // (Assigning nil unconditionally is equivalent to the previous
        // if-guarded assignments — messaging/assigning over nil is harmless.)
        _error = nil;
        self.progress = 0;

        _assetReader = nil;
        _videoOutput = nil;
        _audioOutput = nil;

        _assetWriter = nil;
        _videoInput = nil;
        _videoPixelBufferAdaptor = nil;
        // BUGFIX: the original checked/niled _audioOutput twice and never
        // released _audioInput; reset the audio writer input as well.
        _audioInput = nil;

        _inputQueue = nil;

        self.completionHandler = nil;

    }
    
    - (void)finish {

        // Never touch the writer after an explicit cancellation — cancelExport
        // already invoked -complete on the input queue.
        if (self.assetReader.status == AVAssetReaderStatusCancelled ||
            self.assetWriter.status == AVAssetWriterStatusCancelled) {
            return;
        }

        if (self.assetWriter.status == AVAssetWriterStatusFailed) {

            [self complete];

        } else if (self.assetReader.status == AVAssetReaderStatusFailed) {

            // Reader failed mid-export: abandon the partially written file.
            [self.assetWriter cancelWriting];
            [self complete];

        } else {

            // Normal path: let the writer flush, then report completion.
            [self.assetWriter finishWritingWithCompletionHandler:^{
                [self complete];
            }];

        }

    }
    
    - (void)complete {

        // A failed or cancelled export leaves a partial file on disk — remove it
        // so callers never see a half-written output.
        AVAssetWriterStatus writerStatus = self.assetWriter.status;
        if (writerStatus == AVAssetWriterStatusFailed || writerStatus == AVAssetWriterStatusCancelled) {
            [[NSFileManager defaultManager] removeItemAtURL:self.outputURL error:nil];
        }

        // Invoke the callback at most once, then drop it.
        if (self.completionHandler) {
            self.completionHandler();
            self.completionHandler = nil;
        }

    }
    
    - (NSError *)error {

        // Prefer an explicitly recorded error, then the writer's, then the reader's.
        if (_error) {
            return _error;
        }
        return self.assetWriter.error ?: self.assetReader.error;

    }
    
    - (AVMutableVideoComposition *)buildDefaultVideoComposition {

        // Builds a pass-through composition that aspect-fits the first video
        // track into targetSize, honoring the track's preferred transform.
        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        AVAssetTrack *videoTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

        // Frame rate: prefer the non-droppable rate from videoSettings, else the
        // track's nominal rate, else the project default.
        // NOTE(review): self.videoSettings is lazily created by its getter, so the
        // nominalFrameRate branch below may be unreachable — confirm.
        float trackFrameRate = 0;
        if (self.videoSettings) {

            NSDictionary *videoCompressionProperties = [self.videoSettings objectForKey:AVVideoCompressionPropertiesKey];

            if (videoCompressionProperties) {
                NSNumber *frameRate = [videoCompressionProperties objectForKey:AVVideoAverageNonDroppableFrameRateKey];
                if (frameRate) {
                    trackFrameRate = frameRate.floatValue;
                }
            }

        } else {
            trackFrameRate = [videoTrack nominalFrameRate];
        }

        if (trackFrameRate == 0) {
            trackFrameRate = kDefaultVideoFrameRate;
        }

        // One frame lasts 1/fps; CMTimeMake takes an int32_t timescale, so cast
        // explicitly rather than relying on implicit float->int conversion.
        videoComposition.frameDuration = CMTimeMake(1, (int32_t)trackFrameRate);

        CGSize targetSize = CGSizeMake([self.videoSettings[AVVideoWidthKey] floatValue],
                                       [self.videoSettings[AVVideoHeightKey] floatValue]);
        CGSize naturalSize = [videoTrack naturalSize];
        CGAffineTransform transform = videoTrack.preferredTransform;

        // Workaround for radar 31928389 — some assets carry a bogus -560
        // translation. See https://github.com/rs/SDAVAssetExportSession/pull/70
        if (transform.ty == -560) {
            transform.ty = 0;
        }
        if (transform.tx == -560) {
            transform.tx = 0;
        }

        // Swap dimensions for ±90°-rotated (portrait) tracks so renderSize
        // matches the displayed orientation.
        CGFloat videoAngleInDegree = atan2(transform.b, transform.a) * 180 / M_PI;
        if (videoAngleInDegree == 90 || videoAngleInDegree == -90) {
            CGFloat width = naturalSize.width;
            naturalSize.width = naturalSize.height;
            naturalSize.height = width;
        }
        videoComposition.renderSize = naturalSize;

        // Aspect-fit ("center inside") the track into targetSize.
        {
            float xratio = targetSize.width / naturalSize.width;
            float yratio = targetSize.height / naturalSize.height;
            float ratio = MIN(xratio, yratio);

            float postWidth = naturalSize.width * ratio;
            float postHeight = naturalSize.height * ratio;
            float transx = (targetSize.width - postWidth) / 2;
            float transy = (targetSize.height - postHeight) / 2;

            CGAffineTransform matrix = CGAffineTransformMakeTranslation(transx / xratio, transy / yratio);
            matrix = CGAffineTransformScale(matrix, ratio / xratio, ratio / yratio);
            transform = CGAffineTransformConcat(transform, matrix);
        }

        // Single pass-through instruction spanning the whole asset.
        AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);

        AVMutableVideoCompositionLayerInstruction *passThroughLayer =
            [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

        [passThroughLayer setTransform:transform atTime:kCMTimeZero];

        passThroughInstruction.layerInstructions = @[passThroughLayer];
        videoComposition.instructions = @[passThroughInstruction];

        return videoComposition;

    }
    
    #pragma mark Get Method
    
    - (AVAssetExportSessionStatus)status {

        // Map the writer's status onto AVAssetExportSession's status space.
        switch (self.assetWriter.status) {
            case AVAssetWriterStatusWriting:
                return AVAssetExportSessionStatusExporting;
            case AVAssetWriterStatusFailed:
                return AVAssetExportSessionStatusFailed;
            case AVAssetWriterStatusCompleted:
                return AVAssetExportSessionStatusCompleted;
            case AVAssetWriterStatusCancelled:
                return AVAssetExportSessionStatusCancelled;
            case AVAssetWriterStatusUnknown:
            default:
                return AVAssetExportSessionStatusUnknown;
        }

    }
    
    - (dispatch_queue_t)inputQueue {

        if (!_inputQueue) {
            // Serial: all reader/writer sample traffic must stay ordered.
            _inputQueue = dispatch_queue_create("AHSVVideoEncoderInputQueue", DISPATCH_QUEUE_SERIAL);
        }

        return _inputQueue;

    }
    
    - (AVAssetReader *)assetReader {

        if (!_assetReader) {
            NSError *error;
            _assetReader = [[AVAssetReader alloc] initWithAsset:self.asset error:&error];
            // Check the returned object, not the error pointer — Cocoa only
            // guarantees *error on failure.
            if (!_assetReader) {
                NSLog(@"assetReader 创建失败!! %@", error);
            }
        }

        return _assetReader;

    }
    
    - (AVAssetReaderVideoCompositionOutput *)videoOutput {

        if (!_videoOutput) {
            NSArray *videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];
            // nil videoSettings: samples are vended in the composition's native format.
            _videoOutput = [[AVAssetReaderVideoCompositionOutput alloc] initWithVideoTracks:videoTracks videoSettings:nil];
            _videoOutput.videoComposition = self.videoComposition;
        }

        return _videoOutput;

    }
    
    - (AVAssetReaderAudioMixOutput *)audioOutput {

        if (!_audioOutput) {
            NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
            // nil audioSettings: samples are vended in their stored format.
            _audioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
            _audioOutput.audioMix = self.audioMix;
        }

        return _audioOutput;

    }
    
    - (AVAssetWriterInputPixelBufferAdaptor *)videoPixelBufferAdaptor {

        if (!_videoPixelBufferAdaptor) {
            // BGRA buffers sized to the composition's render size, flagged for
            // OpenGL ES texture/FBO compatibility.
            NSDictionary *pixelBufferAttributes = @{
                (id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
                (id)kCVPixelBufferWidthKey: @(self.videoOutput.videoComposition.renderSize.width),
                (id)kCVPixelBufferHeightKey: @(self.videoOutput.videoComposition.renderSize.height),
                @"IOSurfaceOpenGLESTextureCompatibility": @YES,
                @"IOSurfaceOpenGLESFBOCompatibility": @YES,
            };
            _videoPixelBufferAdaptor =
                [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput
                                                                                 sourcePixelBufferAttributes:pixelBufferAttributes];
        }

        return _videoPixelBufferAdaptor;

    }
    
    - (AVVideoComposition *)videoComposition {

        if (!_videoComposition) {
            // Lazily build the default pass-through composition if the caller
            // never assigned one.
            _videoComposition = [self buildDefaultVideoComposition];
        }

        return _videoComposition;

    }
    
    - (AVAssetWriter *)assetWriter {

        if (!_assetWriter) {
            NSError *writerError;
            _assetWriter = [AVAssetWriter assetWriterWithURL:self.outputURL fileType:self.outputFileType error:&writerError];
            _assetWriter.shouldOptimizeForNetworkUse = self.shouldOptimizeForNetworkUse;
            // Check the returned object, not the error pointer — Cocoa only
            // guarantees *error on failure.
            if (!_assetWriter) {
                NSLog(@"assetWriterc 创建失败 %@", writerError);
            }
        }

        return _assetWriter;

    }
    
    - (AVAssetWriterInput *)videoInput {

        if (!_videoInput) {
            _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:self.videoSettings];
            // Offline export: the input may buffer freely, no real-time constraint.
            _videoInput.expectsMediaDataInRealTime = NO;
        }

        return _videoInput;

    }
    
    - (AVAssetWriterInput *)audioInput {

        if (!_audioInput) {
            _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:self.audioSettings];
            // Offline export: the input may buffer freely, no real-time constraint.
            _audioInput.expectsMediaDataInRealTime = NO;
        }

        return _audioInput;

    }
    
    - (NSDictionary *)videoSettings {

        if (!_videoSettings) {
            // Default: H.264 High 4.0 at ~6 Mbps, sized to targetSize.
            _videoSettings = @{
                AVVideoCodecKey: AVVideoCodecH264,
                AVVideoWidthKey: @(self.targetSize.width),
                AVVideoHeightKey: @(self.targetSize.height),
                AVVideoCompressionPropertiesKey: @{
                    AVVideoAverageBitRateKey: @(6000000),
                    AVVideoProfileLevelKey: AVVideoProfileLevelH264High40,
                },
            };
        }

        return _videoSettings;

    }
    
    - (NSDictionary *)audioSettings {

        if (!_audioSettings) {
            // Default: stereo AAC at 44.1 kHz, 128 kbps.
            _audioSettings = @{
                AVFormatIDKey: @(kAudioFormatMPEG4AAC),
                AVNumberOfChannelsKey: @(2),
                AVSampleRateKey: @(44100),
                AVEncoderBitRateKey: @(128000),
            };
        }

        return _audioSettings;

    }
    
    #pragma mark Set Method
    
    @end
    
    
    
    

    相关文章

      网友评论

        本文标题:iOS 自定义AVAssetExportSession

        本文链接:https://www.haomeiwen.com/subject/rsqjlctx.html