Add the ability to detect QR codes as well as rectangles. Project done!
Sam Davies committed Jul 31, 2014
1 parent fde30f4 commit fb7b60c
Showing 3 changed files with 133 additions and 31 deletions.
@@ -15,8 +15,38 @@
     <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
       <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
       <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+      <subviews>
+        <segmentedControl opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="top" segmentControlStyle="plain" selectedSegmentIndex="0" translatesAutoresizingMaskIntoConstraints="NO" id="Sqn-cQ-OYS">
+          <rect key="frame" x="225" y="552" width="151" height="29"/>
+          <segments>
+            <segment title="Rectangle"/>
+            <segment title="QR Code"/>
+          </segments>
+          <connections>
+            <action selector="handleDetectorSelectionChange:" destination="BYZ-38-t0r" eventType="valueChanged" id="41u-8s-61n"/>
+          </connections>
+        </segmentedControl>
+        <label opaque="NO" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="QR Decode" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="AwI-U6-1dk">
+          <rect key="frame" x="0.0" y="496" width="600" height="48"/>
+          <color key="backgroundColor" white="1" alpha="0.5" colorSpace="calibratedWhite"/>
+          <fontDescription key="fontDescription" name="HelveticaNeue-Thin" family="Helvetica Neue" pointSize="40"/>
+          <color key="textColor" cocoaTouchSystemColor="darkTextColor"/>
+          <nil key="highlightedColor"/>
+        </label>
+      </subviews>
       <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+      <constraints>
+        <constraint firstItem="wfy-db-euE" firstAttribute="top" secondItem="Sqn-cQ-OYS" secondAttribute="bottom" constant="20" id="Fm2-Oa-8CY"/>
+        <constraint firstAttribute="centerX" secondItem="AwI-U6-1dk" secondAttribute="centerX" id="SwI-Q6-mGe"/>
+        <constraint firstItem="AwI-U6-1dk" firstAttribute="width" secondItem="8bC-Xf-vdC" secondAttribute="width" id="iNA-Jw-qgb"/>
+        <constraint firstItem="Sqn-cQ-OYS" firstAttribute="top" secondItem="AwI-U6-1dk" secondAttribute="bottom" constant="8" id="qGF-Or-n3b"/>
+        <constraint firstAttribute="centerX" secondItem="Sqn-cQ-OYS" secondAttribute="centerX" constant="-0.5" id="rD6-Sd-DMU"/>
+      </constraints>
     </view>
+    <connections>
+      <outlet property="detectorModeSelector" destination="Sqn-cQ-OYS" id="HVh-4P-gRa"/>
+      <outlet property="qrDecodeLabel" destination="AwI-U6-1dk" id="00H-Ra-Xvr"/>
+    </connections>
   </viewController>
   <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
 </objects>
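The storyboard diff above adds a two-segment mode selector that fires handleDetectorSelectionChange: on its valueChanged event, plus a translucent "QR Decode" label; both are exposed to the view controller as outlets. Purely for illustration, here is a rough code-only equivalent of those additions — the names and values come from the XML above, but this setup code is hypothetical and not part of the commit:

    // Hypothetical code-only equivalent of the storyboard additions above
    let detectorModeSelector = UISegmentedControl(items: ["Rectangle", "QR Code"])
    detectorModeSelector.selectedSegmentIndex = 0
    detectorModeSelector.addTarget(self, action: "handleDetectorSelectionChange:",
      forControlEvents: .ValueChanged)

    let qrDecodeLabel = UILabel()
    qrDecodeLabel.text = "QR Decode"
    qrDecodeLabel.textAlignment = .Center
    qrDecodeLabel.font = UIFont(name: "HelveticaNeue-Thin", size: 40)
    qrDecodeLabel.backgroundColor = UIColor(white: 1.0, alpha: 0.5)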
@@ -28,7 +28,7 @@ class CoreImageVideoFilter: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
   var videoDisplayViewBounds: CGRect!
   var renderContext: CIContext!
 
-  var avSession: AVCaptureSession!
+  var avSession: AVCaptureSession?
   var sessionQueue: dispatch_queue_t!
 
   var detector: CIDetector?
@@ -39,6 +39,7 @@ class CoreImageVideoFilter: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
     videoDisplayView.transform = CGAffineTransformMakeRotation(CGFloat(M_PI_2))
     videoDisplayView.frame = superview.bounds
     superview.addSubview(videoDisplayView)
+    superview.sendSubviewToBack(videoDisplayView)
 
     renderContext = CIContext(EAGLContext: videoDisplayView.context)
     sessionQueue = dispatch_queue_create("AVSessionQueue", DISPATCH_QUEUE_SERIAL)
@@ -48,19 +49,34 @@ class CoreImageVideoFilter: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
   }
 
   deinit {
-    avSession.stopRunning()
+    stopFiltering()
     dispatch_release(sessionQueue)
   }
 
+  func startFiltering() {
+    // Create a session if we don't already have one
+    if avSession == nil {
+      avSession = createAVSession()
+    }
+
+    // And kick it off
+    avSession?.startRunning()
+  }
+
+  func stopFiltering() {
+    // Stop the av session
+    avSession?.stopRunning()
+  }
+
   func createAVSession() -> AVCaptureSession {
     // Input from video camera
     let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
     var error: NSError?
     let input = AVCaptureDeviceInput(device: device, error: &error)
 
     // Start out with low quality
-    avSession = AVCaptureSession()
-    avSession.sessionPreset = AVCaptureSessionPresetMedium
+    let session = AVCaptureSession()
+    session.sessionPreset = AVCaptureSessionPresetMedium
 
     // Output
     let videoOutput = AVCaptureVideoDataOutput()
@@ -70,11 +86,10 @@ class CoreImageVideoFilter: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
     videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)
 
     // Join it all together
-    avSession.addInput(input)
-    avSession.addOutput(videoOutput)
-
-    // And kick it off
-    avSession.startRunning()
+    session.addInput(input)
+    session.addOutput(videoOutput)
+
+    return session
   }
 
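The CoreImageVideoFilter changes above pull session creation out into createAVSession and add explicit startFiltering/stopFiltering entry points, so the capture session can be stopped and restarted whenever the detector mode changes. The applyFilter closure that the view controller swaps in (see the next file) is consumed by this class's sample-buffer delegate callback, which is unchanged in this commit and therefore not shown. A minimal sketch of what that callback plausibly looks like, assuming iOS 8-era AVFoundation and Core Image Swift bindings — reconstructed for context, not taken from the diff:

    // Sketch only: this delegate method lives in the unchanged part of the file;
    // details here are assumed, not confirmed by the diff.
    func captureOutput(captureOutput: AVCaptureOutput!,
      didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
      fromConnection connection: AVCaptureConnection!) {
      let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
      var image = CIImage(CVPixelBuffer: imageBuffer)

      // Run the current detection closure, if one is set
      if let filter = applyFilter {
        if let filtered = filter(image) {
          image = filtered
        }
      }

      // Draw the (possibly annotated) frame into the GL-backed view
      renderContext.drawImage(image, inRect: videoDisplayViewBounds, fromRect: image.extent())
      videoDisplayView.display()
    }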
@@ -17,6 +17,9 @@ import UIKit
 
 class ViewController: UIViewController {
 
+  @IBOutlet var qrDecodeLabel: UILabel!
+  @IBOutlet var detectorModeSelector: UISegmentedControl!
+
   var videoFilter: CoreImageVideoFilter?
   var detector: CIDetector?
 
@@ -25,44 +28,98 @@ class ViewController: UIViewController {
     // Do any additional setup after loading the view, typically from a nib.
 
     // Create the video filter
-    videoFilter = CoreImageVideoFilter(superview: view, applyFilterCallback: {
-      image in
-      return self.performDetection(image)
-    })
+    videoFilter = CoreImageVideoFilter(superview: view, applyFilterCallback: nil)
 
-    // Start the video capture process
-    videoFilter?.startFiltering()
+    // Simulate a tap on the mode selector to start the process
+    detectorModeSelector.selectedSegmentIndex = 0
+    handleDetectorSelectionChange(detectorModeSelector)
   }

+  @IBAction func handleDetectorSelectionChange(sender: UISegmentedControl) {
+    if let videoFilter = videoFilter {
+      videoFilter.stopFiltering()
+      self.qrDecodeLabel.hidden = true
+
+      switch sender.selectedSegmentIndex {
+      case 0:
+        detector = prepareRectangleDetector()
+        videoFilter.applyFilter = { image in
+          return self.performRectangleDetection(image)
+        }
+      case 1:
+        self.qrDecodeLabel.hidden = false
+        detector = prepareQRCodeDetector()
+        videoFilter.applyFilter = { image in
+          let found = self.performQRCodeDetection(image)
+          dispatch_async(dispatch_get_main_queue()) {
+            if found.decode != "" {
+              self.qrDecodeLabel.text = found.decode
+            }
+          }
+          return found.outImage
+        }
+      default:
+        videoFilter.applyFilter = nil
+      }
+
+      videoFilter.startFiltering()
+    }
+  }
 
 
-  func performDetection(image: CIImage) -> CIImage? {
+  // MARK: Utility methods
+  func performRectangleDetection(image: CIImage) -> CIImage? {
     var resultImage: CIImage?
-    if !detector {
-      detector = prepareDetector()
-    }
     if let detector = detector {
       // Get the detections
       let features = detector.featuresInImage(image)
       for feature in features as [CIRectangleFeature] {
-        var overlay = CIImage(color: CIColor(red: 1.0, green: 0, blue: 0, alpha: 0.5))
-        overlay = overlay.imageByCroppingToRect(image.extent())
-        overlay = overlay.imageByApplyingFilter("CIPerspectiveTransformWithExtent",
-          withInputParameters: [
-            "inputExtent": CIVector(CGRect: image.extent()),
-            "inputTopLeft": CIVector(CGPoint: feature.topLeft),
-            "inputTopRight": CIVector(CGPoint: feature.topRight),
-            "inputBottomLeft": CIVector(CGPoint: feature.bottomLeft),
-            "inputBottomRight": CIVector(CGPoint: feature.bottomRight)
-          ])
-        resultImage = overlay.imageByCompositingOverImage(image)
+        resultImage = drawHighlightOverlayForPoints(image, topLeft: feature.topLeft, topRight: feature.topRight,
+          bottomLeft: feature.bottomLeft, bottomRight: feature.bottomRight)
       }
     }
     return resultImage
   }
 
-  func prepareDetector() -> CIDetector {
+  func performQRCodeDetection(image: CIImage) -> (outImage: CIImage?, decode: String) {
+    var resultImage: CIImage?
+    var decode = ""
+    if let detector = detector {
+      let features = detector.featuresInImage(image)
+      for feature in features as [CIQRCodeFeature] {
+        resultImage = drawHighlightOverlayForPoints(image, topLeft: feature.topLeft, topRight: feature.topRight,
+          bottomLeft: feature.bottomLeft, bottomRight: feature.bottomRight)
+        decode = feature.messageString
+      }
+    }
+    return (resultImage, decode)
+  }

+  func prepareRectangleDetector() -> CIDetector {
     let options = [CIDetectorAccuracy: CIDetectorAccuracyHigh, CIDetectorAspectRatio: 1.0]
     return CIDetector(ofType: CIDetectorTypeRectangle, context: nil, options: options)
   }
 
+  func prepareQRCodeDetector() -> CIDetector {
+    let options = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
+    return CIDetector(ofType: CIDetectorTypeQRCode, context: nil, options: options)
+  }

+  func drawHighlightOverlayForPoints(image: CIImage, topLeft: CGPoint, topRight: CGPoint,
+    bottomLeft: CGPoint, bottomRight: CGPoint) -> CIImage {
+    var overlay = CIImage(color: CIColor(red: 1.0, green: 0, blue: 0, alpha: 0.5))
+    overlay = overlay.imageByCroppingToRect(image.extent())
+    overlay = overlay.imageByApplyingFilter("CIPerspectiveTransformWithExtent",
+      withInputParameters: [
+        "inputExtent": CIVector(CGRect: image.extent()),
+        "inputTopLeft": CIVector(CGPoint: topLeft),
+        "inputTopRight": CIVector(CGPoint: topRight),
+        "inputBottomLeft": CIVector(CGPoint: bottomLeft),
+        "inputBottomRight": CIVector(CGPoint: bottomRight)
+      ])
+    return overlay.imageByCompositingOverImage(image)
+  }
 }
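With the detector plumbing in place, the whole QR path reduces to a small Core Image pattern. As a standalone sketch of the same CIDetector API exercised above (assumed usage, not code from this commit; someImage stands in for any CIImage you already have):

    // Standalone sketch: decode a QR code from a single CIImage
    let qrDetector = CIDetector(ofType: CIDetectorTypeQRCode, context: nil,
      options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
    for feature in qrDetector.featuresInImage(someImage) as [CIQRCodeFeature] {
      println("Decoded QR payload: \(feature.messageString)")  // the encoded string
      println("Found at: \(feature.bounds)")                   // bounds in image coordinates
    }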
