create movie from [UIImage], Swift

I converted the Objective-C code posted by @Cameron E to Swift 3, and it's working. The original answer: @Cameron E's CEMovieMaker

The following is the CXEImagesToVideo class:

//
//  CXEImagesToVideo.swift
//  VideoAPPTest
//
//  Created by Wulei on 16/12/14.
//  Copyright © 2016 wulei. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit

typealias CXEMovieMakerCompletion = (URL) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?


public class CXEImagesToVideo: NSObject{
    var assetWriter:AVAssetWriter!
    var writeInput:AVAssetWriterInput!
    var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
    var videoSettings:[String : Any]!
    var frameTime:CMTime!
    var fileURL:URL!

    var completionBlock: CXEMovieMakerCompletion?
    var movieMakerUIImageExtractor:CXEMovieMakerUIImageExtractor?


public class func videoSettings(codec: String, width: Int, height: Int) -> [String: Any] {
    if width % 16 != 0 {
        print("warning: video settings width must be divisible by 16")
    }

    // Use the codec passed in rather than hard-coding it.
    let videoSettings: [String: Any] = [AVVideoCodecKey: codec,
                         AVVideoWidthKey: width,
                         AVVideoHeightKey: height]

    return videoSettings
}

public init(videoSettings: [String: Any]) {
    super.init()

    let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    let tempPath = paths[0] + "/exportvideo.mp4"
    if(FileManager.default.fileExists(atPath: tempPath)){
        guard (try? FileManager.default.removeItem(atPath: tempPath)) != nil else {
            print("remove path failed")
            return
        }
    }

    self.fileURL = URL(fileURLWithPath: tempPath)
    self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeMPEG4)

    self.videoSettings = videoSettings
    self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    assert(self.assetWriter.canAdd(self.writeInput), "add failed")

    self.assetWriter.add(self.writeInput)
    let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
    self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
    self.frameTime = CMTimeMake(1, 10)
}

func createMovieFrom(urls: [URL], withCompletion: @escaping CXEMovieMakerCompletion){
    self.createMovieFromSource(images: urls as [AnyObject], extractor: {(inputObject: AnyObject) -> UIImage? in
        // Return nil (instead of crashing) if the URL can't be read; that frame is then skipped.
        guard let url = inputObject as? URL, let data = try? Data(contentsOf: url) else { return nil }
        return UIImage(data: data)}, withCompletion: withCompletion)
}

func createMovieFrom(images: [UIImage], withCompletion: @escaping CXEMovieMakerCompletion){
    self.createMovieFromSource(images: images, extractor: {(inputObject:AnyObject) -> UIImage? in
    return inputObject as? UIImage}, withCompletion: withCompletion)
}

func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor, withCompletion: @escaping CXEMovieMakerCompletion){
    self.completionBlock = withCompletion

    self.assetWriter.startWriting()
    self.assetWriter.startSession(atSourceTime: kCMTimeZero)

    let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")
    var i = 0
    let frameNumber = images.count

    self.writeInput.requestMediaDataWhenReady(on: mediaInputQueue){
        while i < frameNumber {
            if self.writeInput.isReadyForMoreMediaData {
                var sampleBuffer: CVPixelBuffer?
                autoreleasepool {
                    // Skip the frame instead of force-unwrapping if extraction fails.
                    if let img = extractor(images[i]), let cgImage = img.cgImage {
                        sampleBuffer = self.newPixelBufferFrom(cgImage: cgImage)
                    } else {
                        print("Warning: could not extract one of the frames")
                    }
                }
                if let sampleBuffer = sampleBuffer {
                    if i == 0 {
                        self.bufferAdapter.append(sampleBuffer, withPresentationTime: kCMTimeZero)
                    } else {
                        let lastTime = CMTimeMake(Int64(i - 1), self.frameTime.timescale)
                        let presentTime = CMTimeAdd(lastTime, self.frameTime)
                        self.bufferAdapter.append(sampleBuffer, withPresentationTime: presentTime)
                    }
                }
                i += 1
            }
        }
        self.writeInput.markAsFinished()
        self.assetWriter.finishWriting {
            DispatchQueue.main.sync {
                self.completionBlock!(self.fileURL)
            }
        }
    }
}

func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
    let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
    var pxbuffer:CVPixelBuffer?
    let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
    let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

    let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
    assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

    CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
    let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
    assert(context != nil, "context is nil")

    context!.concatenate(CGAffineTransform.identity)
    context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
    CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
    return pxbuffer
}
}

Usage:

    var uiImages = [UIImage]()

    // add images to uiImages here

    let settings = CXEImagesToVideo.videoSettings(codec: AVVideoCodecH264, width: (uiImages[0].cgImage?.width)!, height: (uiImages[0].cgImage?.height)!)
    let movieMaker = CXEImagesToVideo(videoSettings: settings)
    movieMaker.createMovieFrom(images: uiImages){ (fileURL:URL) in
        let video = AVAsset(url: fileURL)
        let playerItem = AVPlayerItem(asset: video)
        let avPlayer = AVPlayer(playerItem: playerItem)
        let playerLayer = AVPlayerLayer(player: avPlayer)
        playerLayer.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.width * 3.0 / 4.0)
        self.view.layer.addSublayer(playerLayer)
        avPlayer.play()
    }

Export or play the video using fileURL. The gist contains both async and sync versions: https://gist.github.com/Willib/b97b08d8d877ca5d875ff14abb4c3f1a
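
If you also want to save ("export") the finished movie to the photo library rather than just play it, a minimal sketch using the Photos framework might look like this. It is only an illustration, not part of the gist: it assumes photo library permission has already been granted (NSPhotoLibraryUsageDescription in Info.plist plus a PHPhotoLibrary.requestAuthorization call), and that fileURL is the URL delivered to the completion block above.

    import Photos

    // Sketch only: save the generated movie to the photo library.
    // Assumes photo library access has already been requested and granted.
    func saveVideoToPhotoLibrary(fileURL: URL) {
        PHPhotoLibrary.shared().performChanges({
            _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileURL)
        }, completionHandler: { success, error in
            print(success ? "video saved" : "save failed: \(String(describing: error))")
        })
    }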


Constructing a Dictionary literal is straightforward (note the explicit [String: Any] annotation, since the values mix String and Int):

import AVFoundation

let videoSettings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: 640,
    AVVideoHeightKey: 480
]

As for everything else, I would encourage you to read through Apple's The Swift Programming Language to establish fundamentals first, rather than relying on SO or tutorials that happen to cover what you want to do. "Teach a man to fish", as they say.