I am using CIDetector to recognize rectangles. I have gotten as far as showing the camera feed with a colored overlay drawn over the recognized rectangle. However, I want to take the area that overlay covers (not the overlay itself) and create an image view containing only the image from that rectangle area.
Here is my code:
import UIKit
import CoreImage

class ViewController: UIViewController {

    @IBOutlet weak var croppedImg: UIImageView!

    var videoFilter: CoreImageVideoFilter?
    var detector: CIDetector?
    var image: CIImage?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        // Poll periodically; update() must be exposed to Obj-C for the selector to resolve
        Timer.scheduledTimer(timeInterval: 0.1, target: self,
                             selector: #selector(ViewController.update),
                             userInfo: nil, repeats: true)

        // Create the video filter
        videoFilter = CoreImageVideoFilter(superview: view, applyFilterCallback: nil)

        // Simulate a tap on the mode selector to start the process
        if let videoFilter = videoFilter {
            videoFilter.stopFiltering()
            detector = prepareRectangleDetector()
            videoFilter.applyFilter = { image in
                return self.performRectangleDetection(image)
            }
            videoFilter.startFiltering()
        }
    }
    //MARK: Utility methods

    func performRectangleDetection(_ image: CIImage) -> CIImage? {
        var resultImage: CIImage?
        if let detector = detector {
            // Get the detections
            let features = detector.features(in: image)
            for feature in features as! [CIRectangleFeature] {
                resultImage = drawHighlightOverlayForPoints(image, topLeft: feature.topLeft, topRight: feature.topRight,
                                                            bottomLeft: feature.bottomLeft, bottomRight: feature.bottomRight)
            }
        }
        return resultImage
    }
    /* func performQRCodeDetection(_ image: CIImage) -> (outImage: CIImage?, decode: String) {
        var resultImage: CIImage?
        var decode = ""
        if let detector = detector {
            let features = detector.features(in: image)
            for feature in features as! [CIQRCodeFeature] {
                resultImage = drawHighlightOverlayForPoints(image, topLeft: feature.topLeft, topRight: feature.topRight,
                                                            bottomLeft: feature.bottomLeft, bottomRight: feature.bottomRight)
                decode = feature.messageString!
            }
        }
        return (resultImage, decode)
    } */
    func prepareRectangleDetector() -> CIDetector {
        let options: [String: Any] = [CIDetectorAccuracy: CIDetectorAccuracyHigh, CIDetectorAspectRatio: 1.0]
        return CIDetector(ofType: CIDetectorTypeRectangle, context: nil, options: options)!
    }

    /* func prepareQRCodeDetector() -> CIDetector {
        let options = [CIDetectorAccuracy: CIDetectorAccuracyLow]
        return CIDetector(ofType: CIDetectorTypeQRCode, context: nil, options: options)!
    } */
    func drawHighlightOverlayForPoints(_ image: CIImage, topLeft: CGPoint, topRight: CGPoint,
                                       bottomLeft: CGPoint, bottomRight: CGPoint) -> CIImage {
        // Solid-color overlay, cropped to the source extent and warped onto the detected quad
        var overlay = CIImage(color: CIColor(red: 0.0, green: 0.4, blue: 0.6, alpha: 0.5))
        overlay = overlay.cropping(to: image.extent)
        overlay = overlay.applyingFilter("CIPerspectiveTransformWithExtent",
                                         withInputParameters: [
                                            "inputExtent": CIVector(cgRect: image.extent),
                                            "inputTopLeft": CIVector(cgPoint: topLeft),
                                            "inputTopRight": CIVector(cgPoint: topRight),
                                            "inputBottomLeft": CIVector(cgPoint: bottomLeft),
                                            "inputBottomRight": CIVector(cgPoint: bottomRight)
                                         ])
        /* if resultImage != nil {
            croppedImg.image = UIImage(ciImage: resultImage!)
            croppedImg.frame = (image.extent)
            croppedImg.layer.borderWidth = 8
            croppedImg.layer.borderColor = UIColor.red.cgColor
        } */
        return overlay.compositingOverImage(image)
    }
    @objc func update() {
        // Something cool
        // if resultImage != nil {
        //     croppedImg.image = UIImage(ciImage: resultImage!)
        // }
    }
}
croppedImg is a smaller image view in the corner of the screen where I want to display only the rectangle that is detected.
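My best guess so far is that I need something like CIPerspectiveCorrection to pull out just the detected quad and then render it through a CIContext before handing it to the image view. Below is a rough, untested sketch of that idea (cropToRectangle is just a name I made up), but I can't figure out where to hook it in, or whether this is even the right approach:

    // Untested sketch of what I think I need: perspective-correct the source
    // image down to the detected corner points, then render it to a bitmap.
    func cropToRectangle(_ image: CIImage, feature: CIRectangleFeature) -> UIImage? {
        let corrected = image.applyingFilter("CIPerspectiveCorrection",
                                             withInputParameters: [
                                                "inputTopLeft": CIVector(cgPoint: feature.topLeft),
                                                "inputTopRight": CIVector(cgPoint: feature.topRight),
                                                "inputBottomLeft": CIVector(cgPoint: feature.bottomLeft),
                                                "inputBottomRight": CIVector(cgPoint: feature.bottomRight)
                                             ])
        // Render through a CIContext so the UIImage has a real bitmap backing it
        let context = CIContext()
        guard let cgImage = context.createCGImage(corrected, from: corrected.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

If that is the right direction, should I call something like this from performRectangleDetection (on the main thread) so croppedImg gets updated, or does it belong in update()?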