How can I implement different camera modes or styles in my iOS camera app using pageview controller


This is my code for the page view controller that hosts the camera views (like Photographic Styles). It sets up basic page-based navigation for switching between different instances of filtercameraViewController.

import UIKit
import AVFoundation

class filterViewController: UIPageViewController, UIPageViewControllerDelegate, UIPageViewControllerDataSource {
    var cameraViewControllers: [UIViewController] = []

    override func viewDidLoad() {
        super.viewDidLoad()

        self.dataSource = self
        self.delegate = self

        // Add the camera view controllers, one per style/mode
        let camera1ViewController = filtercameraViewController()
        let camera2ViewController = filtercameraViewController()
        let camera3ViewController = filtercameraViewController()

        cameraViewControllers = [camera1ViewController, camera2ViewController, camera3ViewController]

        if let firstView = cameraViewControllers.first {
            setViewControllers([firstView], direction: .forward, animated: true, completion: nil)
        }

    }
    // Not called anywhere in this snippet; intended to shrink the page to a strip of the screen
    func setPageSize() {
        // Set the page size to match the size of filtercameraViewController
        let pageWidth = UIScreen.main.bounds.width
        let pageHeight = UIScreen.main.bounds.height / 5.0 // Adjust as needed

        self.view.frame = CGRect(x: 0, y: 0, width: pageWidth, height: pageHeight)
    }
    
    func pageViewController(_ pageViewController: UIPageViewController, viewControllerBefore viewController: UIViewController) -> UIViewController? {
        guard let currentIndex = cameraViewControllers.firstIndex(of: viewController) else { return nil }
        let previousIndex = currentIndex - 1
        if previousIndex < 0 {
            return nil
        }
        return cameraViewControllers[previousIndex]
    }

    func pageViewController(_ pageViewController: UIPageViewController, viewControllerAfter viewController: UIViewController) -> UIViewController? {
        guard let currentIndex = cameraViewControllers.firstIndex(of: viewController) else { return nil }
        let nextIndex = currentIndex + 1
        if nextIndex >= cameraViewControllers.count {
            return nil
        }
        return cameraViewControllers[nextIndex]
    }

    override init(transitionStyle style: UIPageViewController.TransitionStyle, navigationOrientation: UIPageViewController.NavigationOrientation, options: [UIPageViewController.OptionsKey: Any]? = nil) {
        // Note: the passed-in arguments are ignored; this always uses a horizontal scroll transition
        super.init(transitionStyle: .scroll, navigationOrientation: .horizontal, options: nil)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
   
}

The code initializes the camera view, sets up the camera with an AVCaptureSession, adds an AVCaptureDeviceInput and an AVCapturePhotoOutput to the session, and sets up a preview layer for the camera view.

import UIKit
import AVFoundation

class filtercameraViewController: cameraViewController {

    override func viewDidLoad() {
        super.viewDidLoad()

        // Create a smaller, inset camera view (the superclass has already added a full-screen one)
        cameraView = UIView()
        cameraView.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(cameraView)

        let topInset: CGFloat = 40.0
        let sideInset: CGFloat = 40.0
        let bottomInset: CGFloat = 140.0

        NSLayoutConstraint.activate([
            cameraView.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: sideInset),
            cameraView.trailingAnchor.constraint(equalTo: view.trailingAnchor, constant: -sideInset),
            cameraView.topAnchor.constraint(equalTo: view.topAnchor, constant: topInset),
            cameraView.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -bottomInset)
        ])
        
        // Set up a capture session for this page (note: the superclass also creates one in setupCamera())
        captureSession = AVCaptureSession()

        if let device = AVCaptureDevice.default(for: .video) {
            do {
                let input = try AVCaptureDeviceInput(device: device)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                }

                stillImageOutput = AVCapturePhotoOutput()
                if captureSession.canAddOutput(stillImageOutput) {
                    captureSession.addOutput(stillImageOutput)
                }

                previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                cameraView.layer.addSublayer(previewLayer)

                // Start the session off the main thread so viewDidLoad isn't blocked
                DispatchQueue.global(qos: .userInitiated).async {
                    self.captureSession.startRunning()
                }
            } catch {
                print("Error setting up camera: \(error)")
            }
        }
    }

    }
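
One thing I'm unsure about: in this subclass I never set previewLayer.frame, so right now I rely on the superclass's viewDidAppear. A minimal fix I was considering (assuming viewDidLayoutSubviews is the right place for it) is:

    // Keeps the preview layer sized to the inset camera view whenever layout changes.
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer?.frame = cameraView.bounds
    }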

This is the main class of my code. It has a button that opens the pages, and once a page is open it can apply filters. I've included only a small part of the code, which I think is enough:

import UIKit
import AVFoundation
import Photos
import CoreMotion

class cameraViewController: UIViewController, AVCapturePhotoCaptureDelegate, UIGestureRecognizerDelegate {
    
    
    
    var cameraView: UIView!
    var captureButton: UIButton!
    var cameraSwitchButton: UIButton!
    var flashModeButton: UIButton!
    
    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCapturePhotoOutput!
    var previewLayer: AVCaptureVideoPreviewLayer!
    var currentZoomFactor: CGFloat = 1.0
    var maxZoomFactor: CGFloat = 5.0
    var zoomStep: CGFloat = 1.0
    var currentFlashMode: AVCaptureDevice.FlashMode = .auto // flash mode defaults to auto
    var isFlashOn: Bool = false
    // var exposurePoint = CGPoint(x: 0.5, y: 0.5)
    //  var focusPoint = CGPoint(x: 0.5, y: 0.5)
    var boxView: UIView!
    var exposureSlider: UISlider!
    var currentExposureBias: Float = 0.0
    var currentFocusLevel: Float = 0.0 // calculate
    var motionManager: CMMotionManager!
    var yawAngle: Double = 0.0
    var segmentedControll : UISegmentedControl! // segmented control for selecting an aspect ratio
    var currentAspectRatio: AVCaptureSession.Preset = .hd1920x1080 // default aspect ratio preset for the capture session
    
    var timerButton : UIButton!
    var timerDuration: TimeInterval = 0
    var timerLabel: UILabel!
    var timer : Timer?
    
    var styleButton : UIButton!
    
    var device : AVCaptureDevice!
    
    var lastManualExposureValue: Float = 0.0
    
    var index : Int = 0
    
    //   var pageViewController: UIPageViewController!
    
    var isStyleViewActive = false
    
    var cameraViewControllers: [cameraViewController] = []
    
    override func viewDidLoad() {
        super.viewDidLoad()
        
        setupCameraView()
        setupCamera()
        setupButtons()
        setupBoxView()
        setupExposureSlider()
        setupGestures()
        setupMotionManager()
        // configureCamera()
        
        
        
    }
    
    
    // Hides the status bar (battery level, signal, etc.)
    override var prefersStatusBarHidden: Bool {
        return true
    }
    
    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
    
    
    
    
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        
        previewLayer?.connection?.videoOrientation = currentVideoOrientation()
        previewLayer?.frame = cameraView.bounds
        
        // previewLayer.videoGravity = .resizeAspect
        
    }
    override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
    }
    
    
    
    
    
    func setupCameraView() {
        cameraView = UIView()
        cameraView.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(cameraView)
        NSLayoutConstraint.activate([
            cameraView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            cameraView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            cameraView.topAnchor.constraint(equalTo: view.topAnchor),
            cameraView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
        ])
    }
    
   
    func setupCamera() {
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = currentAspectRatio
        
        if let device = AVCaptureDevice.default(for: .video) {
            do {
                let input = try AVCaptureDeviceInput(device: device)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                }
                
                stillImageOutput = AVCapturePhotoOutput()
                if captureSession.canAddOutput(stillImageOutput) {
                    captureSession.addOutput(stillImageOutput)
                }
                
                previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                
                cameraView.layer.addSublayer(previewLayer)
                
                // Start the session off the main thread so viewDidLoad isn't blocked
                DispatchQueue.global(qos: .userInitiated).async {
                    self.captureSession.startRunning()
                }
            } catch {
                print("Error setting up camera: \(error)")
            }
        }
    }
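
For context, the style button just presents this page controller modally. The real wiring lives in setupButtons(), which isn't shown here, so the snippet below is only a simplified placeholder (presentStyles() is a made-up name for the button's action):

    // Sketch only: a stand-in for the action that styleButton triggers in setupButtons().
    // Lives in cameraViewController.
    @objc func presentStyles() {
        let pages = filterViewController(transitionStyle: .scroll,
                                         navigationOrientation: .horizontal,
                                         options: nil)
        pages.modalPresentationStyle = .fullScreen
        present(pages, animated: true)
        isStyleViewActive = true
    }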

I have created an iOS camera app with a main camera view, and I want to add support for different camera modes or styles, similar to Photographic Styles on the iPhone 13. Each mode should have its own set of camera settings and UI elements.

What is the best way to implement this feature in my iOS app? Can you provide some guidance or code examples on how to create different camera modes and switch between them?

I'm looking for advice on the overall architecture and how to handle camera settings and UI transitions between modes effectively. Any tips, code snippets, or references to relevant documentation would be greatly appreciated.
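
For what it's worth, the rough direction I was considering is a small value type describing each style (session preset, filter, exposure bias) that gets handed to each filtercameraViewController page. Nothing below exists in my code yet; CameraStyle and the style property are placeholder names:

import AVFoundation

// Hypothetical per-mode configuration; all names here are placeholders.
struct CameraStyle {
    let name: String
    let sessionPreset: AVCaptureSession.Preset
    let filterName: String?   // e.g. a Core Image filter such as "CIPhotoEffectChrome"
    let exposureBias: Float
}

let styles: [CameraStyle] = [
    CameraStyle(name: "Standard", sessionPreset: .hd1920x1080, filterName: nil, exposureBias: 0.0),
    CameraStyle(name: "Rich Contrast", sessionPreset: .hd1920x1080, filterName: "CIPhotoEffectNoir", exposureBias: -0.3),
    CameraStyle(name: "Warm", sessionPreset: .hd1920x1080, filterName: "CIPhotoEffectChrome", exposureBias: 0.2)
]

// Each page would then get its own configuration, e.g.
//     let page = filtercameraViewController()
//     page.style = styles[index]   // `style` would be a new property on the page class

Is something like this a reasonable direction, or is there a better-established pattern for per-mode camera configuration?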
