Add a video display which routes the camera through CoreImage
No detectors or anything yet
Sam Davies committed Jul 30, 2014
1 parent 898e662 commit 788c2b5
Showing 2 changed files with 99 additions and 5 deletions.
@@ -1,19 +1,19 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6162" systemVersion="14A238h" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6185.11" systemVersion="13E28" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<dependencies>
-<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6160"/>
+<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6190.4"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
-<viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="target" sceneMemberID="viewController">
+<viewController id="BYZ-38-t0r" customClass="ViewController" customModule="LiveDetection" customModuleProvider="target" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
-<rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
+<rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
</view>
@@ -14,12 +14,106 @@
//

import UIKit
import GLKit
import AVFoundation
import CoreMedia
import CoreImage
import OpenGLES
import QuartzCore

-class ViewController: UIViewController {
+class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var videoDisplayView: GLKView!
    var videoDisplayViewBounds: CGRect!
    var renderContext: CIContext!

    var avSession: AVCaptureSession!
    var sessionQueue: dispatch_queue_t!

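    // Build the display half of the pipeline: a GLKView, plus a CIContext that renders directly into its OpenGL ES context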
    override func viewDidLoad() {
        super.viewDidLoad()
        // Create a GLKView backed by an OpenGL ES 2 context to display the video
        videoDisplayView = GLKView(frame: view.bounds, context: EAGLContext(API: .OpenGLES2))
        // Camera frames arrive rotated, so rotate the view by 90° to compensate
        videoDisplayView.transform = CGAffineTransformMakeRotation(CGFloat(M_PI_2))
        videoDisplayView.frame = view.bounds
        view.addSubview(videoDisplayView)

        // CoreImage context which renders straight into the GLKView's GL context
        renderContext = CIContext(EAGLContext: videoDisplayView.context)
        // Serial queue on which the capture session delivers sample buffers
        sessionQueue = dispatch_queue_create("AVSessionQueue", DISPATCH_QUEUE_SERIAL)

        videoDisplayView.bindDrawable()
        videoDisplayViewBounds = CGRect(x: 0, y: 0, width: videoDisplayView.drawableWidth, height: videoDisplayView.drawableHeight)

        // Start the video capture process
        start()
    }

    deinit {
        dispatch_release(sessionQueue)
    }

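    // Build the capture half: camera input feeding a BGRA video output, with sample buffers delivered to self on the serial queue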
    func start() {
        // Input from the video camera
        let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        var error: NSError?
        let input = AVCaptureDeviceInput(device: device, error: &error)

        // Start out with medium quality
        avSession = AVCaptureSession()
        avSession.sessionPreset = AVCaptureSessionPresetMedium

        // Output BGRA frames, dropping any that arrive while a previous frame is still being processed
        let videoOutput = AVCaptureVideoDataOutput()

        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA]
        videoOutput.alwaysDiscardsLateVideoFrames = true
        videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)

        // Join it all together
        avSession.addInput(input)
        avSession.addOutput(videoOutput)

        // And kick it off
        avSession.startRunning()
    }

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {

        // Need to shimmy this through type-hell: CMSampleBufferGetImageBuffer
        // returns an Unmanaged buffer, so round-trip it via an opaque pointer
        let opaqueBuffer = CMSampleBufferGetImageBuffer(sampleBuffer).toOpaque()
        let imageBuffer = Unmanaged<CVPixelBuffer>.fromOpaque(opaqueBuffer).takeUnretainedValue()
        let sourceImage = CIImage(CVPixelBuffer: imageBuffer, options: nil)

        // Clip the source image to the aspect ratio of the view, cropping
        // equally from both ends of the longer dimension
        var drawFrame = sourceImage.extent()
        let imageAR = drawFrame.width / drawFrame.height
        let viewAR = videoDisplayViewBounds.width / videoDisplayViewBounds.height
        if imageAR > viewAR {
            // Image is relatively wider than the view: crop the width
            drawFrame.origin.x += (drawFrame.width - drawFrame.height * viewAR) / 2.0
            drawFrame.size.width = drawFrame.height * viewAR
        } else {
            // Image is relatively taller than the view: crop the height
            drawFrame.origin.y += (drawFrame.height - drawFrame.width / viewAR) / 2.0
            drawFrame.size.height = drawFrame.width / viewAR
        }

        videoDisplayView.bindDrawable()
        if videoDisplayView.context != EAGLContext.currentContext() {
            EAGLContext.setCurrentContext(videoDisplayView.context)
        }

        // Clear the GLKView to grey
        glClearColor(0.5, 0.5, 0.5, 1.0)
        glClear(0x00004000) // GL_COLOR_BUFFER_BIT

        // Set the blend mode to "source over" so that CoreImage will use it
        glEnable(0x0BE2) // GL_BLEND
        glBlendFunc(1, 0x0303) // GL_ONE, GL_ONE_MINUS_SRC_ALPHA

        renderContext.drawImage(sourceImage, inRect: videoDisplayViewBounds, fromRect: drawFrame)

        videoDisplayView.display()
    }

}
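The commit message calls out that no detectors are hooked up yet. As a rough sketch of where one might later slot in (an assumption about a follow-up, not part of this commit), a CIDetector could be created once alongside renderContext and then queried in captureOutput before the frame is drawn:

// Hypothetical follow-up (not in this commit): face detection on each frame.
// The detector is expensive to create, so build it once, e.g. in viewDidLoad,
// reusing the same CIContext that renders the video.
let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: renderContext,
    options: [CIDetectorAccuracy: CIDetectorAccuracyLow])

// Then, inside captureOutput, once sourceImage exists:
for feature in faceDetector.featuresInImage(sourceImage) as [CIFaceFeature] {
    // feature.bounds is in image coordinates; a later commit might draw an
    // overlay here instead of just logging
    println(NSStringFromCGRect(feature.bounds))
}

Low accuracy keeps the per-frame cost down, which matters when the detector runs on live video rather than a single still image.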