美文网首页
AVFoundation

AVFoundation

作者: 周一见丶 | 来源:发表于2018-08-09 10:30 被阅读0次
    /// Concatenates two bundled movies (test1.MOV + test2.MOV) into one composition,
    /// fades the combined audio out over the full duration, preserves each clip's
    /// orientation, and exports the result to Documents/test3.mov asynchronously.
    - (void)test {
        // Precise duration/timing is required for frame-accurate composition editing.
        NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
        NSURL *firstUrl = [[NSBundle mainBundle] URLForResource:@"test1" withExtension:@"MOV"];
        AVURLAsset *firstVideoAsset = [[AVURLAsset alloc] initWithURL:firstUrl options:options];
        NSURL *secondUrl = [[NSBundle mainBundle] URLForResource:@"test2" withExtension:@"MOV"];
        AVURLAsset *secondVideoAsset = [[AVURLAsset alloc] initWithURL:secondUrl options:options];

        AVMutableComposition *mutableComposition = [AVMutableComposition composition];
        AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

        // firstObject is nil-safe; objectAtIndex:0 throws if an asset has no such track.
        AVAssetTrack *firstVideoAssetTrack = [firstVideoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
        AVAssetTrack *secondVideoAssetTrack = [secondVideoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
        AVAssetTrack *firstAudioAssetTrack = [firstVideoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
        AVAssetTrack *secondAudioAssetTrack = [secondVideoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
        if (!firstVideoAssetTrack || !secondVideoAssetTrack || !firstAudioAssetTrack || !secondAudioAssetTrack) {
            NSLog(@"One of the source assets is missing a video or audio track.");
            return;
        }

        // Insert both clips back to back; check the BOOL return instead of passing error:nil.
        NSError *insertError = nil;
        BOOL inserted =
            [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstVideoAssetTrack.timeRange.duration) ofTrack:firstVideoAssetTrack atTime:kCMTimeZero error:&insertError] &&
            [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondVideoAssetTrack.timeRange.duration) ofTrack:secondVideoAssetTrack atTime:firstVideoAssetTrack.timeRange.duration error:&insertError] &&
            [audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAudioAssetTrack.timeRange.duration) ofTrack:firstAudioAssetTrack atTime:kCMTimeZero error:&insertError] &&
            [audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAudioAssetTrack.timeRange.duration) ofTrack:secondAudioAssetTrack atTime:firstAudioAssetTrack.timeRange.duration error:&insertError];
        if (!inserted) {
            NSLog(@"Failed to insert track segments: %@", insertError);
            return;
        }

        AVMutableAudioMix *mutableAudioMix = [AVMutableAudioMix audioMix];
        AVMutableAudioMixInputParameters *mixParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioCompositionTrack];
        // Set the volume ramp to slowly fade the audio out over the duration of the composition.
        [mixParameters setVolumeRampFromStartVolume:1.f toEndVolume:0.f timeRange:CMTimeRangeMake(kCMTimeZero, mutableComposition.duration)];
        mutableAudioMix.inputParameters = @[mixParameters];

        BOOL isFirstVideoPortrait = NO;
        CGAffineTransform firstTransform = firstVideoAssetTrack.preferredTransform;
        // A 90°/270° rotation has zeroed diagonal terms and ±1 off-diagonal terms,
        // which is how portrait-recorded video is marked.
        if (firstTransform.a == 0 && firstTransform.d == 0 && (firstTransform.b == 1.0 || firstTransform.b == -1.0) && (firstTransform.c == 1.0 || firstTransform.c == -1.0)) {
            isFirstVideoPortrait = YES;
        }
        BOOL isSecondVideoPortrait = NO;
        CGAffineTransform secondTransform = secondVideoAssetTrack.preferredTransform;
        if (secondTransform.a == 0 && secondTransform.d == 0 && (secondTransform.b == 1.0 || secondTransform.b == -1.0) && (secondTransform.c == 1.0 || secondTransform.c == -1.0)) {
            isSecondVideoPortrait = YES;
        }
        if (isFirstVideoPortrait != isSecondVideoPortrait) {
            // FIXME: UIAlertView has been deprecated since iOS 9 — migrate to
            // UIAlertController once the presenting view controller is available here.
            UIAlertView *incompatibleVideoOrientationAlert = [[UIAlertView alloc] initWithTitle:@"Error!" message:@"Cannot combine a video shot in portrait mode with a video shot in landscape mode." delegate:self cancelButtonTitle:@"Dismiss" otherButtonTitles:nil];
            [incompatibleVideoOrientationAlert show];
            return;
        }

        AVMutableVideoCompositionInstruction *firstVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        // First instruction covers [0, firstDuration).
        firstVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstVideoAssetTrack.timeRange.duration);
        AVMutableVideoCompositionInstruction *secondVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        // CMTimeRangeMake takes (start, DURATION). The second instruction starts where
        // the first clip ends and lasts only the SECOND clip's duration. (The original
        // passed the summed duration here, extending the instruction past the end of
        // the composition and leaving the instruction ranges inconsistent.)
        secondVideoCompositionInstruction.timeRange = CMTimeRangeMake(firstVideoAssetTrack.timeRange.duration, secondVideoAssetTrack.timeRange.duration);

        AVMutableVideoCompositionLayerInstruction *firstVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
        // Apply each source clip's preferred transform at the time its segment begins.
        [firstVideoLayerInstruction setTransform:firstTransform atTime:kCMTimeZero];
        AVMutableVideoCompositionLayerInstruction *secondVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
        [secondVideoLayerInstruction setTransform:secondTransform atTime:firstVideoAssetTrack.timeRange.duration];
        firstVideoCompositionInstruction.layerInstructions = @[firstVideoLayerInstruction];
        secondVideoCompositionInstruction.layerInstructions = @[secondVideoLayerInstruction];

        AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
        mutableVideoComposition.instructions = @[firstVideoCompositionInstruction, secondVideoCompositionInstruction];

        CGSize naturalSizeFirst, naturalSizeSecond;
        // If the first video asset was shot in portrait, so was the second (checked above).
        if (isFirstVideoPortrait) {
            // Swap width and height so rotated (portrait) tracks render with the right aspect.
            naturalSizeFirst = CGSizeMake(firstVideoAssetTrack.naturalSize.height, firstVideoAssetTrack.naturalSize.width);
            naturalSizeSecond = CGSizeMake(secondVideoAssetTrack.naturalSize.height, secondVideoAssetTrack.naturalSize.width);
        }
        else {
            naturalSizeFirst = firstVideoAssetTrack.naturalSize;
            naturalSizeSecond = secondVideoAssetTrack.naturalSize;
        }
        // Render size is the max of the two clips' dimensions so neither is cropped.
        CGFloat renderWidth = MAX(naturalSizeFirst.width, naturalSizeSecond.width);
        CGFloat renderHeight = MAX(naturalSizeFirst.height, naturalSizeSecond.height);
        mutableVideoComposition.renderSize = CGSizeMake(renderWidth, renderHeight);
        // 30 frames per second.
        mutableVideoComposition.frameDuration = CMTimeMake(1, 30);

        // Export the composition at the highest quality preset.
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
        // Output goes to Documents/test3.mov; remove any previous export first
        // (AVAssetExportSession fails if the output file already exists).
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = paths.firstObject;
        NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"test3.mov"];
        NSURL *videoUrl = [NSURL fileURLWithPath:myPathDocs];
        NSFileManager *fm = [NSFileManager defaultManager];
        if ([fm fileExistsAtPath:videoUrl.path]) {
            [fm removeItemAtURL:videoUrl error:nil];
        }
        exporter.outputURL = videoUrl;
        exporter.outputFileType = AVFileTypeQuickTimeMovie;
        exporter.shouldOptimizeForNetworkUse = YES;
        exporter.videoComposition = mutableVideoComposition;
        exporter.audioMix = mutableAudioMix;
        // The completion block intentionally captures `exporter` strongly to keep the
        // session alive until the export finishes; the status check hops to the main
        // queue so any follow-up UI work is safe.
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            dispatch_async(dispatch_get_main_queue(), ^{
                if (exporter.status == AVAssetExportSessionStatusCompleted) {
                    // do something
                }
                else {
                    NSLog(@"Export failed (status %ld): %@", (long)exporter.status, exporter.error);
                }
            });
        }];
    }
    

    相关文章

      网友评论

          本文标题:AVFoundation

          本文链接:https://www.haomeiwen.com/subject/jarqbftx.html