How to merge two or more videos as an overlay on a main video using AVMutableComposition in iOS Swift


I am working on merging two or more videos as overlays on top of a main background video using AVMutableComposition in Swift.

This is my code for the video overlay:

    func overlayOnBackVideo(mainVideoURL: URL, overlayVideoURL: URL, completion: @escaping Completion) {
        var arrayLayerInstructions: [AVMutableVideoCompositionLayerInstruction] = []
        let composition = AVMutableComposition()
        
        let mainVideoAsset = AVAsset(url: mainVideoURL)
        let overlayVideoAsset = AVAsset(url: overlayVideoURL)
        
        guard let mainVideoTrack = mainVideoAsset.tracks(withMediaType: .video).first,
              let overlayVideoTrack = overlayVideoAsset.tracks(withMediaType: .video).first else {
            return
        }
                
        let mainCompositionVideoTrack = composition.addMutableTrack(withMediaType: .video,
                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)
        let overlayCompositionVideoTrack = composition.addMutableTrack(withMediaType: .video,
                                                                       preferredTrackID: kCMPersistentTrackID_Invalid)
        do {
            try mainCompositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: mainVideoAsset.duration),
                                                       of: mainVideoTrack,
                                                       at: .zero)
            
            try overlayCompositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: overlayVideoAsset.duration),
                                                       of: overlayVideoTrack,
                                                       at: .zero)
            
        } catch {
            print("Error inserting video tracks: \(error)")
            return
        }
        
        let outputSize = CGSize(width: mainVideoTrack.naturalSize.width, height: mainVideoTrack.naturalSize.height)
        
        let videoLayer = CALayer()
        videoLayer.frame = CGRect(x: 0, y: 0, width: outputSize.width, height: outputSize.height)
        
        let overlayLayer = CALayer()
        overlayLayer.frame = CGRect(x: 150, y: 200, width: 500, height: 500) // Position and size of the overlay area

        // Create a mask shape for the overlay layer
        let maskPath = UIBezierPath(rect: CGRect(x: 150, y: 200, width: 500, height: 500)) // Specify the overlay area
        let maskShapeLayer = CAShapeLayer()
        maskShapeLayer.path = maskPath.cgPath
        overlayLayer.mask = maskShapeLayer

        
        videoLayer.addSublayer(overlayLayer)
        
        // Create a video composition instruction for the main video track
        let mainLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mainCompositionVideoTrack!)
        mainLayerInstruction.setTransform(mainVideoTrack.preferredTransform, at: .zero)

        // Create a video composition instruction for the overlay video track
        let overlayLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: overlayCompositionVideoTrack!)
        overlayLayerInstruction.setTransform(overlayVideoTrack.preferredTransform, at: .zero)

        // Apply a translation to position the overlay video
        let videoSize = mainVideoTrack.naturalSize
        let overlaySize = overlayVideoTrack.naturalSize
        let deltaX = videoSize.width - overlaySize.width - 150 // Adjust this as needed
        let deltaY = videoSize.height - overlaySize.height - 200 // Adjust this as needed
        overlayLayerInstruction.setTransform(CGAffineTransform(translationX: deltaX, y: deltaY), at: .zero)

        // Add the layer instructions to the array
        arrayLayerInstructions.append(mainLayerInstruction)
        arrayLayerInstructions.append(overlayLayerInstruction)

        // Main video composition instruction
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: CMTimeAdd(mainVideoAsset.duration, overlayVideoAsset.duration))
        mainInstruction.layerInstructions = arrayLayerInstructions
        
        // Main video composition
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
        mainComposition.renderSize = outputSize
        mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: [overlayLayer, videoLayer], in: videoLayer)

        // Save and export
        let path = NSTemporaryDirectory().appending("story.mp4")
        let exportURL = URL(fileURLWithPath: path)
        try? FileManager.default.removeItem(atPath: path)
        
        let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
        exporter?.outputURL = exportURL
        exporter?.outputFileType = .mp4
        exporter?.shouldOptimizeForNetworkUse = true
        exporter?.videoComposition = mainComposition
        
        exporter?.exportAsynchronously(completionHandler: {
            switch exporter?.status {
            case .completed:
                print("Export completed!")
                completion(exportURL, nil)
            case .failed, .unknown, .cancelled:
                print("Export failed: \(String(describing: exporter?.error))")
                completion(nil, exporter?.error)
            default:
                break
            }
        })
    }

I need your help figuring out where I went wrong. Both layers end up showing the same video, and one layer simply covers the other instead of showing the overlay in its own smaller area. Can you please help me identify my mistake?
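
For reference, this is my current understanding of how a picture-in-picture overlay is usually positioned with layer instructions alone: the overlay track is scaled down to a target rectangle, translated into place, and its layer instruction is listed first so it renders on top. This is only a minimal sketch, not working code from my project; the helper name and the 500×500 rectangle at (150, 200) are placeholders I chose:

    import AVFoundation
    import CoreGraphics

    // Minimal sketch (names and rect are placeholders): keep the main track
    // full-frame and place the overlay track inside a smaller rectangle.
    func makeOverlayInstruction(mainTrack: AVAssetTrack,
                                overlayTrack: AVAssetTrack,
                                overlayNaturalSize: CGSize,
                                overlayRect: CGRect,
                                duration: CMTime) -> AVMutableVideoCompositionInstruction {
        // Main video stays at its natural size, filling the render area.
        let mainLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mainTrack)
        mainLayerInstruction.setTransform(.identity, at: .zero)

        // Scale the overlay down to the target rect, then move it into place.
        let scale = CGAffineTransform(scaleX: overlayRect.width / overlayNaturalSize.width,
                                      y: overlayRect.height / overlayNaturalSize.height)
        let move = CGAffineTransform(translationX: overlayRect.origin.x, y: overlayRect.origin.y)

        let overlayLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: overlayTrack)
        overlayLayerInstruction.setTransform(scale.concatenating(move), at: .zero)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRange(start: .zero, duration: duration)
        // The first layer instruction in the array is rendered on top.
        instruction.layerInstructions = [overlayLayerInstruction, mainLayerInstruction]
        return instruction
    }

    // Example usage with the composition tracks from the code above:
    // let instruction = makeOverlayInstruction(mainTrack: mainCompositionVideoTrack!,
    //                                          overlayTrack: overlayCompositionVideoTrack!,
    //                                          overlayNaturalSize: overlayVideoTrack.naturalSize,
    //                                          overlayRect: CGRect(x: 150, y: 200, width: 500, height: 500),
    //                                          duration: mainVideoAsset.duration)

As far as I understand, setTransform on the layer instruction handles both the scaling and the positioning, so the CALayer/mask setup would only be needed for static images or animations rather than for a second video track, but please correct me if that assumption is wrong.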
