How to merge a single image with a video

I am trying to merge a single video with a single image. This is not an attempt to combine many images into a single video.

I am using AVMutableComposition to combine the tracks. My app can combine videos and images (and, as it stands, merging video with video works fine!). I try to use AVAssetWriter to turn the single image into a video (I believe this is where my problem is, but I'm not 100% sure). I then save that video to the app (documents directory). From there, I access it inside my merge code and combine the preset video with the image that has now been turned into a video.

The flow:

The user picks an image →

The image goes through AVAssetWriter to become a video →

That video is merged with the video I have already preset →

Result: one video made from the picked image and the preset video.

The problem I'm having: my code leaves a blank space where the image should appear inside the video. The ImageConverter file I have does convert the image into a video, but I only ever see the LAST frame as the image; every other frame is transparent, as if the picture weren't there. So if I convert the image into a 5-second video (say at 30 frames/sec), I see blank space for (30 * 5) - 1 frames, and only on the last frame does the picture finally appear. I'm just looking for guidance on how to turn a single image into a video, OR how to combine a video and an image without converting the image into a video first. Thanks!

The merge code is here:

func merge() {
    if let firstAsset = controller.firstAsset, secondAsset = self.asset {

        // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
        let mixComposition = AVMutableComposition()

        let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
                                                                     preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTime(seconds: 8, preferredTimescale: 600)),
                                           ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
                                           atTime: kCMTimeZero)
        } catch _ {
            print("Failed to load first track")
        }

        do {
            //HERE THE TIME IS 0.666667, BUT SHOULD BE 0
            print(CMTimeGetSeconds(secondAsset.duration), CMTimeGetSeconds(firstTrack.timeRange.duration))
            try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
                                            ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0],
                                            atTime: firstTrack.timeRange.duration)
        } catch _ {
            print("Failed to load second track")
        }
        do {
            try firstTrack.insertTimeRange(CMTimeRangeMake(CMTime(seconds: 8+CMTimeGetSeconds(secondAsset.duration), preferredTimescale: 600), firstAsset.duration),
                                           ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
                                           atTime: firstTrack.timeRange.duration) // the track's duration already covers the first two segments
        } catch _ {
            print("failed")
        }

        // 3 - Audio track
        if let loadedAudioAsset = controller.audioAsset {
            let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
            do {
                try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
                                               ofTrack: loadedAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0] ,
                                               atTime: kCMTimeZero)
            } catch _ {
                print("Failed to load Audio track")
            }
        }

        // 4 - Get path
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
        let dateFormatter = NSDateFormatter()
        dateFormatter.dateStyle = .LongStyle
        dateFormatter.timeStyle = .ShortStyle
        let date = dateFormatter.stringFromDate(NSDate())
        let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("mergeVideo.mov")
        let url = NSURL(fileURLWithPath: savePath)
        _ = try? NSFileManager().removeItemAtURL(url)

        // 5 - Create Exporter
        print("exporting")
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputURL = url
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.shouldOptimizeForNetworkUse = false
        exporter.videoComposition = mainComposition // mainComposition (an AVVideoComposition) is built elsewhere in the project

        // 6 - Perform the Export
        controller.currentlyEditing = true
        exporter.exportAsynchronouslyWithCompletionHandler() {
            dispatch_async(dispatch_get_main_queue()) {
                print("done")
                self.controller.currentlyEditing = false
                self.controller.merged = true
                self.button.blurView.superview?.hidden = true
                self.controller.player.replaceCurrentItemWithPlayerItem(AVPlayerItem(URL: url))
                self.controller.firstAsset = AVAsset(URL: url)
            }
        }
    }
}
func exportDidFinish(session: AVAssetExportSession) {
    if session.status == AVAssetExportSessionStatus.Failed {
        print(session.error)
    }
    if session.status == AVAssetExportSessionStatus.Completed {
        print("succed")
    }
}
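
One thing worth flagging in merge() above: the export completion handler assumes success and never checks exporter.status, even though exportDidFinish(session:) exists for exactly that purpose. A minimal sketch of routing the result through it, in the same Swift 2 API used above:

exporter.exportAsynchronouslyWithCompletionHandler() {
    dispatch_async(dispatch_get_main_queue()) {
        // Inspect the session before touching the player or the merged flag
        self.exportDidFinish(exporter)
        if exporter.status == .Completed {
            self.controller.player.replaceCurrentItemWithPlayerItem(AVPlayerItem(URL: url))
        }
    }
}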

The image conversion is here:

class MyConverter: NSObject {

    var image:UIImage!

    convenience init(image:UIImage) {
        self.init()
        self.image = image
    }

    var outputURL: NSURL {
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
        let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("mergeVideo-pic.mov")
        return getURL(savePath)
    }

    func getURL(path:String) -> NSURL {
        let movieDestinationUrl = NSURL(fileURLWithPath: path)
        _ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
        let url = NSURL(fileURLWithPath: path)
        return url
    }

    func build(completion:() -> Void) {
        guard let videoWriter = try? AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie) else {
            fatalError("AVAssetWriter error")
        }
        let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(image.size.width)), AVVideoHeightKey : NSNumber(float: Float(image.size.height))]

        guard videoWriter.canApplyOutputSettings(outputSettings, forMediaType: AVMediaTypeVideo) else {
            fatalError("Negative : Can't apply the Output settings...")
        }

        let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(unsignedInt: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(float: Float(image.size.width)), kCVPixelBufferHeightKey as String: NSNumber(float: Float(image.size.height))]
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

        if videoWriter.canAddInput(videoWriterInput) {
            videoWriter.addInput(videoWriterInput)
        }

        if videoWriter.startWriting() {
            videoWriter.startSessionAtSourceTime(kCMTimeZero)
            assert(pixelBufferAdaptor.pixelBufferPool != nil)
        }

        let media_queue = dispatch_queue_create("mediaInputQueue", nil)

        videoWriterInput.requestMediaDataWhenReadyOnQueue(media_queue, usingBlock: { () -> Void in
            var appendSucceeded = true
            //Time HERE IS ZERO, but in Merge file, it is 0.66667
            let presentationTime = CMTimeMake(0, 600)

            var pixelBuffer: CVPixelBuffer? = nil
            let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)

            if let pixelBuffer = pixelBuffer where status == 0 {
                let managedPixelBuffer = pixelBuffer
                CVPixelBufferLockBaseAddress(managedPixelBuffer, 0)

                let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                let context = CGBitmapContextCreate(data, Int(self.image.size.width), Int(self.image.size.height), 8, CVPixelBufferGetBytesPerRow(managedPixelBuffer), rgbColorSpace, CGImageAlphaInfo.PremultipliedFirst.rawValue)

                CGContextClearRect(context, CGRectMake(0, 0, CGFloat(self.image.size.width), CGFloat(self.image.size.height)))


                CGContextDrawImage(context, CGRectMake(0, 0, self.image.size.width, self.image.size.height), self.image.CGImage)

                CVPixelBufferUnlockBaseAddress(managedPixelBuffer, 0)

                appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(pixelBuffer, withPresentationTime: presentationTime)
            } else {
                print("Failed to allocate pixel buffer")
                appendSucceeded = false
            }
            if !appendSucceeded {
                print("append failed")
            }
            videoWriterInput.markAsFinished()
            videoWriter.finishWritingWithCompletionHandler { () -> Void in
                print("FINISHED!!!!!")
                completion()
            }
        })
    }
}

Note: I found that if I print(presentationTime) INSIDE the ImageConverter it prints 0, but when I then print the duration inside the merge, I get 0.666667.
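
For reference on those numbers: CMTime is a rational value/timescale pair, and a track that only ever received one sample at time zero has no second timestamp to derive a duration from, so the duration the finished asset reports need not match the time that was appended. A tiny sketch of the arithmetic (values illustrative):

let written = CMTimeMake(0, 600)     // the presentation time appended above: 0/600 = 0.0 s
print(CMTimeGetSeconds(written))     // 0.0
let reported = CMTimeMake(400, 600)  // a duration of 400/600
print(CMTimeGetSeconds(reported))    // 0.666666..., the value seen in merge()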

Note: No answers yet, but I will keep putting a bounty on this question until I find an answer or someone else helps me! Thanks!

Answer 1

Right, so I actually dealt with this problem a while back. The issue is indeed in how you create the video from the picture. What you need to do is append the pixel buffer at time zero and then AGAIN at the end; otherwise you end up with an empty video until the very last frame, exactly as you are experiencing.

The following code is my best attempt at updating your code. At the very end I post my own solution, which is in Objective-C, in case it helps anyone else.

func build(completion:() -> Void) {
    guard let videoWriter = try? AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie) else {
        fatalError("AVAssetWriter error")
    }

    // This might not be a problem for you but width HAS to be divisible by 16 or the movie will come out distorted... don't ask me why. So this is a safeguard
    let pixelsToRemove = fmod(image.size.width, 16)
    let pixelsToAdd = 16 - pixelsToRemove
    let size = CGSizeMake(image.size.width + pixelsToAdd, image.size.height)

    let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(size.width)), AVVideoHeightKey : NSNumber(float: Float(size.height))]

    guard videoWriter.canApplyOutputSettings(outputSettings, forMediaType: AVMediaTypeVideo) else {
        fatalError("Negative : Can't apply the Output settings...")
    }

    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
    let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(unsignedInt: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(float: Float(size.width)), kCVPixelBufferHeightKey as String: NSNumber(float: Float(size.height))]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

    if videoWriter.canAddInput(videoWriterInput) {
        videoWriter.addInput(videoWriterInput)
    }

    if videoWriter.startWriting() {
        videoWriter.startSessionAtSourceTime(kCMTimeZero)
        assert(pixelBufferAdaptor.pixelBufferPool != nil)
    }

    // For simplicity, I'm going to remove the media queue you created and instead explicitly wait until I can append since i am only writing one pixel buffer at two different times

    var pixelBufferCreated = true
    var pixelBuffer: CVPixelBuffer? = nil
    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)

    if let pixelBuffer = pixelBuffer where status == 0 {
        let managedPixelBuffer = pixelBuffer
        CVPixelBufferLockBaseAddress(managedPixelBuffer, 0)

        let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGBitmapContextCreate(data, Int(size.width), Int(size.height), 8, CVPixelBufferGetBytesPerRow(managedPixelBuffer), rgbColorSpace, CGImageAlphaInfo.PremultipliedFirst.rawValue)

        CGContextClearRect(context, CGRectMake(0, 0, CGFloat(size.width), CGFloat(size.height)))

        CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), self.image.CGImage)

        CVPixelBufferUnlockBaseAddress(managedPixelBuffer, 0)
    } else {
        print("Failed to allocate pixel buffer")
        pixelBufferCreated = false
    }

    if (pixelBufferCreated) {
        // Here is where the magic happens, we have our pixelBuffer it time to start writing

        // FIRST - add at time zero
        var appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(pixelBuffer!, withPresentationTime: kCMTimeZero)
        if (!appendSucceeded) {
            // something went wrong, up to you to handle. Should probably return so the rest of the code is not executed though
        }
        // SECOND - wait until the writer is ready for more data with an empty while
        while !videoWriterInput.readyForMoreMediaData {}

        // THIRD - make a CMTime with the desired length of your picture-video. I am going to arbitrarily make it 5 seconds here
        let frameTime: CMTime = CMTimeMake(5, 1) // 5 seconds

        // FOURTH - add the same exact pixel to the end of the video you are creating
        appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(pixelBuffer!, withPresentationTime: frameTime)
        if (!appendSucceeded) {
            // something went wrong, up to you to handle. Should probably return so the rest of the code is not executed though
        }

        videoWriterInput.markAsFinished()
        videoWriter.endSessionAtSourceTime(frameTime)
        videoWriter.finishWritingWithCompletionHandler { () -> Void in
            if videoWriter.status != .Completed {
                // Error writing the video... handle appropriately 
            } else {
                print("FINISHED!!!!!")
                completion()
            }
        }
    }
}
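
For completeness, a minimal usage sketch of the updated build (assuming it replaces the body of build in the MyConverter class from the question; someImage is whatever UIImage you have on hand):

let converter = MyConverter(image: someImage)
converter.build {
    // outputURL now points at a 5-second movie of the still image,
    // ready to be inserted into the AVMutableComposition in merge()
    print(converter.outputURL)
}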

How I managed to do it in Obj-C

Note: I had to make some changes so this would be self-contained, so this method returns a string holding the path the video WILL BE written to. It returns before the video has finished writing, so it is possible to access it before it is ready if you are not careful.

-(NSString *)makeMovieFromImageData:(NSData *)imageData {
    NSError *error;
    UIImage *image = [UIImage imageWithData:imageData];


    // width has to be divisible by 16 or the movie comes out distorted... don't ask me why
    double pixelsToRemove = fmod(image.size.width, 16);

    double pixelsToAdd = 16 - pixelsToRemove; 

    CGSize size = CGSizeMake(image.size.width+pixelsToAdd, image.size.height);

    BOOL hasFoundValidPath = NO;
    NSURL *tempFileURL;
    NSString *outputFile;

    while (!hasFoundValidPath) {

        NSString *guid = [[NSUUID new] UUIDString];
        outputFile = [NSString stringWithFormat:@"picture_%@.mp4", guid];

        NSString *outputDirectory = NSTemporaryDirectory(); // NSTemporaryDirectory is a function, not a search-path constant

        NSString *tempPath = [outputDirectory stringByAppendingPathComponent:outputFile];

        // Will fail if destination already has a file
        if ([[NSFileManager defaultManager] fileExistsAtPath:tempPath]) {
            continue;
        } else {
            hasFoundValidPath = YES;
        }
        tempFileURL = [NSURL fileURLWithPath:tempPath];
    }


    // Start writing
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:tempFileURL
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];

    if (error) {
       // handle error
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];

    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                     sourcePixelBufferAttributes:bufferAttributes];
    if ([videoWriter canAddInput:writerInput]) {
        [videoWriter addInput:writerInput];
    } else {
        // handle error
    }

    [videoWriter startWriting];

    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CGImageRef img = [image CGImage];

    // Now I am going to create the pixel buffer
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                            [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                            [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                            nil];
    CVPixelBufferRef buffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &buffer);

    if ( !(status == kCVReturnSuccess && buffer != NULL) ) {
        NSLog(@"There be some issue. We didn't get a buffer from the image");
    }


    CVPixelBufferLockBaseAddress(buffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(buffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();

    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4*size.width, rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
    CGContextSetRGBFillColor(context, 0, 0, 0, 0);

    CGContextConcatCTM(context, CGAffineTransformIdentity);

    CGContextDrawImage(context, CGRectMake(0, 0, size.width,
                                           size.height), img);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(buffer, 0);

    // At this point we have our buffer so we are going to start by adding to time zero

    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    while (!writerInput.readyForMoreMediaData) {} // wait until ready

    CMTime frameTime = CMTimeMake(5, 1); // 5 second frame

    [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
    CFRelease(buffer);

    [writerInput markAsFinished];

    [videoWriter endSessionAtSourceTime:frameTime];

    [videoWriter finishWritingWithCompletionHandler:^{
        if (videoWriter.status != AVAssetWriterStatusCompleted) {
            // Error
        }
    }]; // end videoWriter finishWriting Block

    // NOTE: the URL is actually being returned before the videoWriter finishes writing so be careful to not access it until it ready
    return outputFile;
}

Answer 2

Here is what works for me; I hope you find it useful. (Note that SDAVAssetExportSession used below is a third-party replacement for AVAssetExportSession that accepts explicit video and audio settings.)

-(void)MixVideo:(NSString *)vidioUrlString withImage:(UIImage *)img
{
    NSURL *videoUrl1 = [[NSURL alloc] initFileURLWithPath:vidioUrlString];
    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoUrl1 options:nil];

    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];


    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];

    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];

    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];

    CGSize sizeOfVideo = CGSizeMake(320, 568);

    //Image of watermark
    UIImage *myImage=img;

    CALayer *layerCa = [CALayer layer];

    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);

    layerCa.opacity = 1.0;

    CALayer *parentLayer=[CALayer layer];

    CALayer *videoLayer=[CALayer layer];

    parentLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);

    videoLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    [parentLayer addSublayer:videoLayer];

    [parentLayer addSublayer:layerCa];

    AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;

    videoComposition.frameDuration=CMTimeMake(1, 30);

    videoComposition.renderSize=sizeOfVideo;

    videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);

    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];

    videoComposition.instructions = [NSArray arrayWithObject: instruction];

    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0];

    NSString *finalPath = [documentsDirectory stringByAppendingFormat:@"/myVideo.mp4"];

    if ([[NSFileManager defaultManager] fileExistsAtPath:finalPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:finalPath error:nil];
    }

    SDAVAssetExportSession *encoder = [SDAVAssetExportSession.alloc initWithAsset:mixComposition];
    encoder.outputFileType = AVFileTypeMPEG4;
    encoder.outputURL = [NSURL fileURLWithPath:finalPath];
    encoder.videoComposition=videoComposition;
    encoder.videoSettings = @
    {
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: @320,
    AVVideoHeightKey: @568,
    AVVideoCompressionPropertiesKey: @
        {
        AVVideoAverageBitRateKey: @900000,
        AVVideoProfileLevelKey: AVVideoProfileLevelH264MainAutoLevel,
        },
    };
    encoder.audioSettings = @
    {
    AVFormatIDKey: @(kAudioFormatMPEG4AAC),
    AVNumberOfChannelsKey: @2,
    AVSampleRateKey: @44100,
    AVEncoderBitRateKey: @128000,
    };

    [encoder exportAsynchronouslyWithCompletionHandler:^
     {

         if (encoder.status == AVAssetExportSessionStatusCompleted)
         {

             NSLog(@"Video export succeeded");
             if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(finalPath))
             {

                 NSLog(@"Video exported successfully path = %@ ",finalPath);
             }

         }
         else if (encoder.status == AVAssetExportSessionStatusCancelled)
         {
             NSLog(@"Video export cancelled");
         }
         else
         {
             NSLog(@"Video export failed with error: %@ (%ld)", encoder.error.localizedDescription, (long)encoder.error.code);
         }
     }];

}

Answer 3

This works for me; it exports a single image to a video (and the image can move within the video rather than staying static). Swift 3.

//
//  CXEImageToAssetURL.swift
//  CXEngine
//
//  Created by wulei on 16/12/14.
//  Copyright © 2016年 wulei. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit
import Photos

fileprivate extension UIImage{
    func normalizedImage() -> UIImage?{
//        if self.imageOrientation == .up{
//            return self
//        }
        let factor = CGFloat(0.8)
        UIGraphicsBeginImageContextWithOptions(CGSize(width:self.size.width * factor, height: self.size.height * factor), false, self.scale)
        self.draw(in: CGRect(x: 0, y: 0, width: self.size.width * factor, height: self.size.height * factor))
        let normalImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return normalImage
    }

//    func clipImage() -> UIImage {

//        var x = CGFloat(0)
//        var y = CGFloat(0)
//        let imageHeight = (self.size.width * 9) / 16
//        y = (self.size.height - imageHeight)/2
//        var rcTmp = CGRect(origin: CGPoint(x: x, y: y), size: self.size)
//        if self.scale > 1.0 {
//            rcTmp = CGRect(x: rcTmp.origin.x * self.scale, y: rcTmp.origin.y * self.scale, width: rcTmp.size.width * self.scale, height: rcTmp.size.height * self.scale)
//        }
//        rcTmp.size.height = imageHeight
//        let imageRef = self.cgImage!.cropping(to: rcTmp)
//        let result = UIImage(cgImage: imageRef!, scale: self.scale, orientation: self.imageOrientation)
//        return result
//        return self
//    }
}

public typealias CXEImageToVideoProgress = (Float) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?


public class CXEImageToVideo: NSObject{

    //MARK: Private Properties

    private var assetWriter:AVAssetWriter!
    private var writeInput:AVAssetWriterInput!
    private var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
    private var videoSettings:[String : Any]!
    private var frameTime:CMTime!
    private var fileURL:URL!
    private var duration:Int = 0

    //MARK: Class Method

     private func videoSettingsFunc(width:Int, height:Int) -> [String: Any]{
        if(Int(width) % 16 != 0){
            print("warning: video settings width must be divisible by 16")
        }

        let videoSettings:[String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                           AVVideoWidthKey: width,
                                           AVVideoHeightKey: height]

        return videoSettings
    }

    //MARK: Public methods

    public init(fileURL: URL, videoWidth:Int, videoHeight:Int) {
        super.init()

        self.videoSettings = videoSettingsFunc(width: videoWidth, height: videoHeight)

        self.fileURL = fileURL
        self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

        self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")

        self.assetWriter.add(self.writeInput)
        let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(1, 25)
    }

//    public func createMovieFrom(url: URL, duration:Int, progressExtractor: CXEImageToVideoProgress){
//        self.duration = duration
//        self.createMovieFromSource(image: url as AnyObject, extractor:{(inputObject:AnyObject) ->UIImage? in
//            return UIImage(data: try! Data(contentsOf: inputObject as! URL))}, progressExtractor: progressExtractor)
//    }

    public func createMovieFrom(imageData: Data, duration:Int, progressExtractor: CXEImageToVideoProgress){
        var image = UIImage(data: imageData)
        image = image?.normalizedImage()
        assert(image != nil)
        self.duration = duration

        self.createMovieFromSource(image: image!, extractor: {(inputObject:AnyObject) -> UIImage? in
            return inputObject as? UIImage}, progressExtractor: progressExtractor)
    }

    //MARK: Private methods

    private func createMovieFromSource(image: AnyObject, extractor: @escaping CXEMovieMakerUIImageExtractor, progressExtractor: CXEImageToVideoProgress){

        self.assetWriter.startWriting()
        let zeroTime = CMTimeMake(Int64(0),self.frameTime.timescale)
        self.assetWriter.startSession(atSourceTime: zeroTime)

        while !self.writeInput.isReadyForMoreMediaData {
            usleep(100)
        }

        var sampleBuffer:CVPixelBuffer?
        var pxDataBuffer:CVPixelBuffer?
        let img = extractor(image)
        assert(img != nil)

        let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let originHeight = frameWidth * img!.cgImage!.height / img!.cgImage!.width
        let heightDifference = originHeight - frameHeight

        let frameCounts = self.duration * Int(self.frameTime.timescale)
        let spacingOfHeight = heightDifference / frameCounts

        sampleBuffer = self.newPixelBufferFrom(cgImage: img!.cgImage!)
        assert(sampleBuffer != nil)

        var presentTime = CMTimeMake(1, self.frameTime.timescale)
        var stepRows = 0

        for i in 0..<frameCounts {
            progressExtractor(Float(i) / Float(frameCounts))

            CVPixelBufferLockBaseAddress(sampleBuffer!, CVPixelBufferLockFlags(rawValue: 0))
            let pointer = CVPixelBufferGetBaseAddress(sampleBuffer!)
            var pxData = pointer?.assumingMemoryBound(to: UInt8.self)
            let bytes = CVPixelBufferGetBytesPerRow(sampleBuffer!) * stepRows
            pxData = pxData?.advanced(by: bytes)

            let status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, pxData!, CVPixelBufferGetBytesPerRow(sampleBuffer!), nil, nil, options as CFDictionary?, &pxDataBuffer)
            assert(status == kCVReturnSuccess && pxDataBuffer != nil, "newPixelBuffer failed")
            CVPixelBufferUnlockBaseAddress(sampleBuffer!, CVPixelBufferLockFlags(rawValue: 0))

            while !self.writeInput.isReadyForMoreMediaData {
                usleep(100)
            }
            if (self.writeInput.isReadyForMoreMediaData){
                if i == 0{
                    self.bufferAdapter.append(pxDataBuffer!, withPresentationTime: zeroTime)
                }else{
                    self.bufferAdapter.append(pxDataBuffer!, withPresentationTime: presentTime)
                }
                presentTime = CMTimeAdd(presentTime, self.frameTime)
            }

            stepRows += spacingOfHeight
        }


        self.writeInput.markAsFinished()
        self.assetWriter.finishWriting {}

        var isSuccess:Bool = false
        while(!isSuccess){
            switch self.assetWriter.status {
            case .completed:
                isSuccess = true
                print("completed")
            case .writing:
                usleep(100)
                print("writing")
            case .failed:
                isSuccess = true
                print("failed")
            case .cancelled:
                isSuccess = true
                print("cancelled")
            default:
                isSuccess = true
                print("unknown")
            }
        }
    }

    private func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
        let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer:CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

        let originHeight = frameWidth * cgImage.height / cgImage.width

        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, originHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pxdata, width: frameWidth, height: originHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        assert(context != nil, "context is nil")

        context!.concatenate(CGAffineTransform.identity)
        context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: frameWidth, height: originHeight))
        CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        return pxbuffer
    }
}
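
A minimal usage sketch (the file URL and dimensions here are illustrative; as the class itself warns, the width should be divisible by 16):

let url = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("stillImage.mov")
// Remove any stale file first: AVAssetWriter will not overwrite an existing file
try? FileManager.default.removeItem(at: url)
let maker = CXEImageToVideo(fileURL: url, videoWidth: 640, videoHeight: 480)
let data = UIImageJPEGRepresentation(someImage, 1.0)!
maker.createMovieFrom(imageData: data, duration: 5) { progress in
    print("progress: \(progress)")
}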

Answer 4

Swift 3 code that should be a good starting point. For production use it still needs error handling, and for some videos it needs to handle the video's size and orientation.

@discardableResult func merge(
    video videoPath: String,
    withForegroundImage foregroundImage: UIImage,
    completion: @escaping (AVAssetExportSession) -> Void) -> AVAssetExportSession {

    let videoUrl = URL(fileURLWithPath: videoPath)
    let videoUrlAsset = AVURLAsset(url: videoUrl, options: nil)

    // Setup `mutableComposition` from the existing video
    let mutableComposition = AVMutableComposition()
    let videoAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaTypeVideo).first!
    let videoCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    videoCompositionTrack.preferredTransform = videoAssetTrack.preferredTransform
    try! videoCompositionTrack.insertTimeRange(CMTimeRange(start:kCMTimeZero, duration:videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: kCMTimeZero)
    let audioAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaTypeAudio).first!
    let audioCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    try! audioCompositionTrack.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration:audioAssetTrack.timeRange.duration), of: audioAssetTrack, at: kCMTimeZero)

    // Create a `videoComposition` to represent the `foregroundImage`
    let videoSize: CGSize = videoCompositionTrack.naturalSize
    let frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
    let imageLayer = CALayer()
    imageLayer.contents = foregroundImage.cgImage
    imageLayer.frame = frame
    let videoLayer = CALayer()
    videoLayer.frame = frame
    let animationLayer = CALayer()
    animationLayer.frame = frame
    animationLayer.addSublayer(videoLayer)
    animationLayer.addSublayer(imageLayer)
    let videoComposition = AVMutableVideoComposition(propertiesOf: videoCompositionTrack.asset!)
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)

    // Export the video
    let documentDirectory = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.cachesDirectory, FileManager.SearchPathDomainMask.userDomainMask, true).first!
    let documentDirectoryUrl = URL(fileURLWithPath: documentDirectory)
    let destinationFilePath = documentDirectoryUrl.appendingPathComponent("video_\(NSUUID().uuidString).mov")
    let exportSession = AVAssetExportSession( asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)!
    exportSession.videoComposition = videoComposition
    exportSession.outputURL = destinationFilePath
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.exportAsynchronously { [weak exportSession] in
        if let strongExportSession = exportSession {
            completion(strongExportSession)
        }
    }

    return exportSession
}
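
Called like this (a sketch; the path and image are whatever your UI provides):

merge(video: pathToMovFile, withForegroundImage: overlayImage) { session in
    switch session.status {
    case .completed:
        print("exported to \(session.outputURL!)")
    default:
        print("export failed: \(String(describing: session.error))")
    }
}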