Swift: how to record a screen video, or convert images to videos

Let's say we want to capture the screen of your game and save it as a video.
Searching for "How to record screen video in Swift" will lead you to RPScreenRecorder. It's kind of cool, but for security reasons you are not allowed to save the video or even access it. The only workaround is to take a series of screenshots and keep them as an array of images. Unfortunately, you may run into performance issues here: capturing at 30 fps could be feasible, but it depends on many factors. For my game I settled on 60/5 ticks, i.e. 12 captures per second; here's the whole code:

//
//  ScreenRecorderService.swift
//  FlappyBirdScream
//
//  Created by Mikita Manko on 4/25/17.
//  Copyright © 2017 Mikita Manko. All rights reserved.
//
 
import Foundation
import AVFoundation
import UIKit
 
//  ScreenRecorderService.swift
//  FlappyBirdScream
//
//  Created by Mikita Manko on 4/25/17.
//  Copyright © 2017 Mikita Manko. All rights reserved.
//
 
import Foundation
import AVFoundation
import UIKit
 
class ScreenRecorderService {
    /// Number of 60 fps ticks skipped between two captures:
    /// 60 / (1 + fpsToSkip) = 12 screenshots per second.
    private let fpsToSkip : Int = 4
    /// Countdown until the next capture; a frame is grabbed when it hits 0.
    private var frameNumber : Int = 0

    /// The view whose contents are captured while recording.
    var view : UIView?
    /// Screenshots collected so far, in capture order.
    private var frames = [UIImage]()
    private var recording = false

    /// Begins capturing screenshots of `view` on subsequent update() calls.
    /// NOTE(review): frames from a previous recording are NOT cleared here
    /// (removeAll is deliberately commented out) — confirm this is intended.
    func start(view: UIView) {
        self.view = view
        recording = true
        //frames.removeAll()
    }

    func getFramesToSkip() -> Int {
        return fpsToSkip
    }

    /// Returns the captured frames.
    /// (The `getFames` name typo is kept — it is part of the public interface.)
    func getFames() -> [UIImage] {
        return frames
    }

    /**
     * Called every frame (60 fps). Taking 60 screenshots per second is too
     * expensive (todo: investigate using a separate thread for that), so only
     * every (1 + fpsToSkip)-th tick actually produces a screenshot.
     */
    func update() {
        if recording, let view = self.view {
            if frameNumber == 0 {
                frameNumber = fpsToSkip
                frames.append(ImageUtils.captureScreen(view: view))
            } else {
                // frameNumber is only ever set to fpsToSkip or decremented,
                // so it can never be negative; a plain else is sufficient.
                frameNumber -= 1
            }
        }
    }

    /// Stops capturing; the collected frames stay available for export.
    func stop() {
        recording = false
        if IS_DEBUG {
            print("ScreenRecorderService stopped with number of frames: " + String(frames.count))
        }
    }

    /// Encodes the captured frames into a video file and passes its URL to `complete`.
    /// Does nothing when no frames were captured — previously this crashed on
    /// the force-unwrap of `frames[0].cgImage`.
    func generateVideoUrl(complete: @escaping(_:URL)->()) {
        guard let firstFrame = frames.first?.cgImage else {
            print("ScreenRecorderService: no frames recorded, nothing to export")
            return
        }
        let settings = ImagesToVideoUtils.videoSettings(codec: AVVideoCodecJPEG /*AVVideoCodecH264*/,
                                                        width: firstFrame.width,
                                                        height: firstFrame.height)
        let movieMaker = ImagesToVideoUtils(videoSettings: settings)

        // Override the encoder's default rate with the effective capture rate (12 fps).
        movieMaker.frameTime = CMTimeMake(1, Int32(60 / (1 + self.fpsToSkip)))
        movieMaker.createMovieFrom(images: frames) { (fileURL:URL) in
            complete(fileURL)
        }
    }

    /// Exports the recording and stores the resulting file in the photo library.
    func saveAsVideo() {
        generateVideoUrl(complete: { (fileURL:URL) in

            VideoService.saveVideo(url: fileURL, complete: {saved in
                print("animation video save complete")
                print(saved)
            })
            //let video = AVAsset(url: fileURL)
            //let playerItem = AVPlayerItem(asset: video)
            //let player = CXEPlayer()
            //player.setPlayerItem(playerItem: playerItem)
            //self.playerView.player = player
        })
    }
}
 
class ScreenRecorderService {
    /// How many 60 fps ticks are skipped after each capture
    /// (one screenshot every 1 + fpsToSkip ticks, i.e. 12 per second).
    private let fpsToSkip : Int = 4
    /// Countdown to the next capture; a frame is grabbed when it reaches 0.
    private var frameNumber : Int = 0

    /// The view being recorded.
    var view : UIView?
    /// Screenshots captured so far, in order.
    private var frames = [UIImage]()
    private var recording = false

    /// Start collecting screenshots of `view` on each eligible update() tick.
    func start(view: UIView) {
        self.view = view
        recording = true
        //frames.removeAll()
    }

    func getFramesToSkip() -> Int {
        return fpsToSkip
    }

    func getFames() -> [UIImage] {
        return frames
    }

    /// Invoked once per 60 fps tick. Capturing on every tick is too expensive,
    /// so only every fifth tick actually takes a screenshot.
    func update() {
        guard recording, let view = self.view else { return }
        if frameNumber > 0 {
            frameNumber -= 1
        } else {
            frameNumber = fpsToSkip
            frames.append(ImageUtils.captureScreen(view: view))
        }
    }

    /// Stop capturing; frames remain available for export.
    func stop() {
        recording = false
        if IS_DEBUG {
            print("ScreenRecorderService stopped with number of frames: " + String(frames.count))
        }
    }

    /// Encode the captured frames into a movie file and hand its URL to `complete`.
    func generateVideoUrl(complete: @escaping(_:URL)->()) {
        let firstImage = frames[0].cgImage
        let settings = ImagesToVideoUtils.videoSettings(
            codec: AVVideoCodecJPEG /*AVVideoCodecH264*/,
            width: firstImage!.width,
            height: firstImage!.height)
        let movieMaker = ImagesToVideoUtils(videoSettings: settings)

        // Replace the default rate with the effective capture rate (12 fps).
        movieMaker.frameTime = CMTimeMake(1, Int32(60 / (1 + self.fpsToSkip)))
        movieMaker.createMovieFrom(images: frames) { complete($0) }
    }

    /// Export the recording and save it into the photo library.
    func saveAsVideo() {
        generateVideoUrl { fileURL in
            VideoService.saveVideo(url: fileURL) { saved in
                print("animation video save complete")
                print(saved)
            }
        }
    }
}

and `saveVideo` does something like this:

    /// Saves the video file at `url` into the user's photo library.
    /// Requests photo-library permission first; reports the outcome through
    /// `complete` (`false` when permission is denied or the save fails).
    static func saveVideo(url: URL, complete:@escaping(_:Bool)->()) {
        VideoService.checkPhotoLibraryPermissions(result: { granted in
            guard granted else {
                complete(false)
                return
            }
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
            }, completionHandler: { success, error in
                if success {
                    if IS_DEBUG { print("VideoService: Video file have been saved successfully") }
                } else {
                    print("\n\nVideoService:  Error: failed to save video file.")
                    print(error ?? "unknown")
                }
                complete(success)
            })
        })
    }

And here’s what ImagesToVideoUtils does — basically, how to convert an array of images into a video:

import Foundation
import AVFoundation
import UIKit
 
typealias CXEMovieMakerCompletion = (URL) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?
 
 
public class ImagesToVideoUtils: NSObject {

    // Output location in the app's Documents directory.
    // NOTE(review): the "exprotvideo" file-name typo is preserved on purpose —
    // other code may reference this exact path.
    static let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    static let tempPath = paths[0] + "/exprotvideo.mp4"
    static let fileURL = URL(fileURLWithPath: tempPath)


    var assetWriter:AVAssetWriter!
    var writeInput:AVAssetWriterInput!
    var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
    var videoSettings:[String : Any]!
    /// Duration of one output frame; defaults to 1/5 s, callers may override.
    var frameTime:CMTime!

    var completionBlock: CXEMovieMakerCompletion?
    var movieMakerUIImageExtractor:CXEMovieMakerUIImageExtractor?


    /// Builds the AVAssetWriterInput output-settings dictionary.
    /// - Parameters:
    ///   - codec: the video codec to use (e.g. AVVideoCodecJPEG or AVVideoCodecH264).
    ///   - width, height: output dimensions in pixels.
    /// Bug fix: `codec` was previously ignored and AVVideoCodecJPEG was always
    /// hard-coded; the parameter is now honored.
    public class func videoSettings(codec:String, width:Int, height:Int) -> [String: Any]{
        if width % 16 != 0 {
            // H.264 encoders expect dimensions divisible by 16; warn, don't fail.
            print("warning: video settings width must be divisible by 16")
        }

        let videoSettings:[String: Any] = [AVVideoCodecKey: codec,
                                           AVVideoWidthKey: width,
                                           AVVideoHeightKey: height]

        return videoSettings
    }

    /// Prepares an AVAssetWriter writing to the fixed temp path with the given
    /// output settings, plus a pixel-buffer adaptor for 32ARGB frames.
    public init(videoSettings: [String: Any]) {
        super.init()

        // A leftover file from a previous export would make AVAssetWriter fail.
        if(FileManager.default.fileExists(atPath: ImagesToVideoUtils.tempPath)){
            guard (try? FileManager.default.removeItem(atPath: ImagesToVideoUtils.tempPath)) != nil else {
                print("remove path failed")
                return
            }
        }

        // try!: failing here means a broken URL/file type, i.e. a programming error.
        // NOTE(review): the container is QuickTime while the path ends in ".mp4" —
        // confirm this mismatch is intended.
        self.assetWriter = try! AVAssetWriter(url: ImagesToVideoUtils.fileURL, fileType: AVFileTypeQuickTimeMovie)

        self.videoSettings = videoSettings
        self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")

        self.assetWriter.add(self.writeInput)
        let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(1, 5)
    }

    /// Converts images loaded from `urls` into a movie.
    /// Bug fix: an unreadable URL used to crash on `try!`; it now yields nil
    /// and the frame is skipped downstream.
    func createMovieFrom(urls: [URL], withCompletion: @escaping CXEMovieMakerCompletion){
        self.createMovieFromSource(images: urls as [AnyObject], extractor:{(inputObject:AnyObject) ->UIImage? in
            guard let data = try? Data(contentsOf: inputObject as! URL) else { return nil }
            return UIImage(data: data)}, withCompletion: withCompletion)
    }

    /// Converts in-memory images into a movie.
    func createMovieFrom(images: [UIImage], withCompletion: @escaping CXEMovieMakerCompletion){
        self.createMovieFromSource(images: images, extractor: {(inputObject:AnyObject) -> UIImage? in
            return inputObject as? UIImage}, withCompletion: withCompletion)
    }

    /// Core writer loop: pulls each source through `extractor`, converts it to a
    /// pixel buffer, and appends it with uniform `frameTime` spacing.
    /// Bug fix: a nil extraction used to print a warning and then force-unwrap
    /// the image anyway (guaranteed crash — the dead `//continue` showed the
    /// intent); such frames are now skipped, and presentation times are derived
    /// from the count of frames actually appended so spacing stays uniform.
    func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor, withCompletion: @escaping CXEMovieMakerCompletion){
        self.completionBlock = withCompletion

        self.assetWriter.startWriting()
        self.assetWriter.startSession(atSourceTime: kCMTimeZero)

        let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")
        var sourceIndex = 0
        var appendedCount = 0
        let sourceCount = images.count

        self.writeInput.requestMediaDataWhenReady(on: mediaInputQueue){
            while sourceIndex < sourceCount {
                // Busy-wait until the writer input can accept more data
                // (matches the original behavior; runs on its own queue).
                guard self.writeInput.isReadyForMoreMediaData else { continue }

                var pixelBuffer: CVPixelBuffer?
                autoreleasepool {
                    if let image = extractor(images[sourceIndex]), let cgImage = image.cgImage {
                        pixelBuffer = self.newPixelBufferFrom(cgImage: cgImage)
                    } else {
                        print("Warning: could not extract one of the frames")
                    }
                }
                sourceIndex += 1

                if let pixelBuffer = pixelBuffer {
                    // appendedCount * frameTime: frame 0 lands at time zero,
                    // every subsequent frame one frameTime later.
                    let presentTime = CMTimeMultiply(self.frameTime, Int32(appendedCount))
                    self.bufferAdapter.append(pixelBuffer, withPresentationTime: presentTime)
                    appendedCount += 1
                }
            }
            self.writeInput.markAsFinished()
            self.assetWriter.finishWriting {
                DispatchQueue.main.sync {
                    self.completionBlock!(ImagesToVideoUtils.fileURL)
                }
            }
        }
    }

    /// Renders `cgImage` into a freshly allocated 32ARGB pixel buffer sized to
    /// the configured output dimensions (the image is drawn at its own size
    /// into the buffer's coordinate space).
    func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
        let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer:CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        assert(context != nil, "context is nil")

        // The identity-transform concatenation from the original was a no-op
        // and has been removed.
        context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
        CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        return pxbuffer
    }
}

That is it. All of this research was done during the development of this game – scream to fly.

Social Share Toolbar