Exporting video in Swift in bigger canvas and fill the background with transparancy

129 Views Asked by At

Problem

I got a video which I want to place in a canvas that is bigger than the video itself. I do this with AVMutableVideoComposition tool and by increasing the renderSize property.

No matter what I do, I can not get the transparency working at the resized canvas. I tried using masks, setting backgroundColors and more CALayers, but I could not get the transparency to stick.

Reproduction

I created this repository in which you can just run the test and see the resulting video for yourself: https://github.com/Jasperav/TransparencyVideo/tree/main/TransparencyVideoTests

Below is the code for how I import and export the video (same code in the repo above):

Call side

let video = Bundle(for: TransparencyVideoTests.self).url(forResource: "transparency", withExtension: ".mov")!
let fileManager = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask)[0]

await VideoEditor().export(url: video, outputDir: fileManager.appending(path: "temp.mov"))

VideoEditor

import AppKit
import AVFoundation
import Foundation
import Photos
import QuartzCore
import OSLog

let logger = Logger()

class VideoEditor {
    func export(
        url: URL,
        outputDir: URL
    ) async {
        let asset = AVURLAsset(url: url)
        let extract = try! await extractData(videoAsset: asset)
    
        try! await exportVideo(outputPath: outputDir, asset: asset, videoComposition: extract)
    }

    private func exportVideo(outputPath: URL, asset: AVAsset, videoComposition: AVMutableVideoComposition) async throws {
        let fileExists = FileManager.default.fileExists(atPath: outputPath.path())

        logger.debug("Output dir: \(outputPath), exists: \(fileExists), render size: \(String(describing: videoComposition.renderSize))")

        if fileExists {
            do {
                try FileManager.default.removeItem(atPath: outputPath.path())
            } catch {
                logger.error("remove file failed")
            }
        }

        let dir = outputPath.deletingLastPathComponent().path()

        logger.debug("Will try to create dir: \(dir)")

        try? FileManager.default.createDirectory(atPath: dir, withIntermediateDirectories: true)

        var isDirectory = ObjCBool(false)

        guard FileManager.default.fileExists(atPath: dir, isDirectory: &isDirectory), isDirectory.boolValue else {
            logger.error("Could not create dir, or dir is a file")

            fatalError()
        }

        guard let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHEVCHighestQualityWithAlpha) else {
            logger.error("generate export failed")

            fatalError()
        }

        exporter.outputURL = outputPath
        exporter.outputFileType = .mov
        exporter.shouldOptimizeForNetworkUse = false
        exporter.videoComposition = videoComposition

        await exporter.export()

        logger.debug("Status: \(String(describing: exporter.status)), error: \(exporter.error)")

        if exporter.status != .completed {
            fatalError()
        }
    }

    private func extractData(videoAsset: AVURLAsset) async throws -> AVMutableVideoComposition {
        guard let videoTrack = try await videoAsset.loadTracks(withMediaType: .video).first else {
            fatalError()
        }

        guard let audioTrack = try await videoAsset.loadTracks(withMediaType: .audio).first else {
            fatalError()
        }

        let composition = AVMutableComposition(urlAssetInitializationOptions: nil)

        guard let compositionVideoTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: videoTrack.trackID) else {
            fatalError()
        }
        guard let compostiionAudioTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: audioTrack.trackID) else {
            fatalError()
        }

        let duration = try await videoAsset.load(.duration)

        try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: duration), of: videoTrack, at: CMTime.zero)
        try compostiionAudioTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: duration), of: audioTrack, at: CMTime.zero)

        let naturalSize = try await videoTrack.load(.naturalSize)
        let preferredTransform = try await videoTrack.load(.preferredTransform)
        let mainInstruction = AVMutableVideoCompositionInstruction()

        mainInstruction.timeRange = CMTimeRange(start: CMTime.zero, end: duration)

        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        let videoComposition = AVMutableVideoComposition()
 
        videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)

        mainInstruction.layerInstructions = [layerInstruction]
        videoComposition.instructions = [mainInstruction]
        
        // Increases the canvas
        videoComposition.renderSize = .init(width: naturalSize.width + 500, height: naturalSize.height + 500)

        return videoComposition
    }

}

After uploading the resulting temp.mov (see console logging), black edges are shown (from https://rotato.app/tools/transparent-video):

enter image description here

How to make sure the background is filled with a transparant color?

1

There are 1 best solutions below

2
user1874594 On

modify your VideoEditor class . This is just a quick & dirty approach

import AVFoundation

class VideoEditor {
    func export(url: URL, outputDir: URL) async {
        let asset = AVURLAsset(url: url)
        let videoComposition = try await createVideoComposition(asset: asset)
        try! await exportVideo(outputPath: outputDir, asset: asset, videoComposition: videoComposition)
    }
    
    private func exportVideo(outputPath: URL, asset: AVAsset, videoComposition: AVVideoComposition) async throws {
        // Same as before
        // Configure export session with video composition
    }
    
    private func createVideoComposition(asset: AVAsset) async throws -> AVVideoComposition {
        // Extract video track
        guard let videoTrack = asset.tracks(withMediaType: .video).first else {
            throw NSError(domain: "VideoEditor", code: 1, userInfo: [NSLocalizedDescriptionKey: "Video track not found"])
        }
        
        // Create video composition instruction
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
        
        // Create layer instruction for video track
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        
        // Configure transformation for video layer
        let naturalSize = videoTrack.naturalSize
        let transform = CGAffineTransform(translationX: 250, y: 250) // Translate to center of increased canvas
        layerInstruction.setTransform(transform, at: .zero)
        
        // Set up video composition
        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = CGSize(width: naturalSize.width + 500, height: naturalSize.height + 500)
        videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
        mainInstruction.layerInstructions = [layerInstruction]
        videoComposition.instructions = [mainInstruction]
        
        // Set background color to transparent
        videoComposition.backgroundColor = UIColor.clear.cgColor
        
        return videoComposition
    }
}

Then explicitly set the backgroundColor property of the AVMutableVideoComposition to UIColor.clear.cgColor, which sets the background color of the video to transparent