Set up camera on the background of UIView
UPDATED TO SWIFT 5
You could try something like this:
I add two UIViews to my UIViewController's main view: one called previewView (for the camera) and another UIView called boxView (which sits above the camera view).
import UIKit
import AVFoundation

class ViewController: UIViewController {
    var previewView: UIView!
    var boxView: UIView!

    //Camera capture required properties
    var videoDataOutput: AVCaptureVideoDataOutput!
    var videoDataOutputQueue: DispatchQueue!
    var previewLayer: AVCaptureVideoPreviewLayer!
    var captureDevice: AVCaptureDevice!
    let session = AVCaptureSession()
    var currentFrame: CIImage!
    var done = false

    override func viewDidLoad() {
        super.viewDidLoad()
        previewView = UIView(frame: CGRect(x: 0,
                                           y: 0,
                                           width: UIScreen.main.bounds.size.width,
                                           height: UIScreen.main.bounds.size.height))
        previewView.contentMode = .scaleAspectFit
        view.addSubview(previewView)

        //Add a box view above the camera preview
        boxView = UIView(frame: CGRect(x: 0, y: 0, width: 100, height: 200))
        boxView.backgroundColor = UIColor.green
        boxView.alpha = 0.3
        view.addSubview(boxView)

        self.setupAVCapture()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if !done {
            session.startRunning()
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    override var shouldAutorotate: Bool {
        if (UIDevice.current.orientation == UIDeviceOrientation.landscapeLeft ||
            UIDevice.current.orientation == UIDeviceOrientation.landscapeRight ||
            UIDevice.current.orientation == UIDeviceOrientation.unknown) {
            return false
        } else {
            return true
        }
    }
}
// AVCaptureVideoDataOutputSampleBufferDelegate protocol and related methods
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

    func setupAVCapture() {
        session.sessionPreset = AVCaptureSession.Preset.vga640x480
        guard let device = AVCaptureDevice
            .default(AVCaptureDevice.DeviceType.builtInWideAngleCamera,
                     for: .video,
                     position: AVCaptureDevice.Position.front) else {
            return
        }
        captureDevice = device
        beginSession()
        done = true
    }

    func beginSession() {
        var deviceInput: AVCaptureDeviceInput!
        do {
            deviceInput = try AVCaptureDeviceInput(device: captureDevice)
            guard deviceInput != nil else {
                print("error: cant get deviceInput")
                return
            }

            if self.session.canAddInput(deviceInput) {
                self.session.addInput(deviceInput)
            }

            videoDataOutput = AVCaptureVideoDataOutput()
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
            videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)

            if session.canAddOutput(self.videoDataOutput) {
                session.addOutput(self.videoDataOutput)
            }

            videoDataOutput.connection(with: AVMediaType.video)?.isEnabled = true

            self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
            self.previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect

            let rootLayer: CALayer = self.previewView.layer
            rootLayer.masksToBounds = true
            self.previewLayer.frame = rootLayer.bounds
            rootLayer.addSublayer(self.previewLayer)

            session.startRunning()
        } catch let error as NSError {
            deviceInput = nil
            print("error: \(error.localizedDescription)")
        }
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        currentFrame = self.convertImageFromCMSampleBufferRef(sampleBuffer)
    }

    // clean up AVCapture
    func stopCamera() {
        session.stopRunning()
        done = false
    }

    func convertImageFromCMSampleBufferRef(_ sampleBuffer: CMSampleBuffer) -> CIImage {
        let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        let ciImage: CIImage = CIImage(cvImageBuffer: pixelBuffer)
        return ciImage
    }
}
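The delegate above only stores each frame in currentFrame and never uses it. If you want to do something with the frames, here is a minimal sketch you could add to the extension; the showCurrentFrame helper and its imageView parameter are hypothetical and not part of the original answer:

    // Hypothetical helper: render the latest captured CIImage into an image view.
    // captureOutput(_:didOutput:from:) runs on videoDataOutputQueue, so hop to the main queue for UI work.
    func showCurrentFrame(in imageView: UIImageView) {
        guard let ciImage = currentFrame else { return }
        let context = CIContext()   // in real code, create one context and reuse it
        if let cgImage = context.createCGImage(ciImage, from: ciImage.extent) {
            DispatchQueue.main.async {
                imageView.image = UIImage(cgImage: cgImage)
            }
        }
    }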
You can also give boxView the same frame as the main view and leave its backgroundColor unset. That way it stays a transparent, full-screen layer on top of the camera preview that you can use as a container for more subviews, as in the sketch below.
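A minimal sketch of that variant, replacing the boxView setup in viewDidLoad above (the label is just a hypothetical example subview):

    // boxView with the same frame as the main view and no backgroundColor,
    // so the camera preview stays visible underneath
    boxView = UIView(frame: view.bounds)
    view.addSubview(boxView)

    // boxView is now a transparent, full-screen container for your own UI
    let label = UILabel(frame: CGRect(x: 20, y: 60, width: 200, height: 40))
    label.text = "Overlay"          // hypothetical example content
    label.textColor = .white
    boxView.addSubview(label)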
IMPORTANT
Remember that since iOS 10 you must ask the user for permission before accessing the camera. You do this by adding the NSCameraUsageDescription key ("Privacy - Camera Usage Description") to your app's
Info.plist
together with a purpose string; if you fail to declare the usage, your app will crash the first time it tries to access the camera.
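Besides the Info.plist entry, you can also check and request camera permission at runtime before configuring the session; a minimal sketch using AVCaptureDevice.requestAccess (for example in viewDidLoad, instead of calling setupAVCapture() directly):

    // Check the current camera authorization status and request access if needed
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        self.setupAVCapture()
    case .notDetermined:
        AVCaptureDevice.requestAccess(for: .video) { granted in
            // the completion handler is called on an arbitrary queue
            DispatchQueue.main.async {
                if granted { self.setupAVCapture() }
            }
        }
    default:
        print("Camera access denied or restricted")
    }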
I hope this can help!
Another way: an SCNView (SceneKit) is useful for augmented reality applications.
- Create a preview layer with AVFoundation or a UIView, then add the preview layer to the view's sublayers.
- Create and customize an SCNView, then add the scene view to the view's subviews.
- Create and customize a scene, then assign it to the scene view's scene.
    // (requires import SceneKit and an existing self.previewLayer from AVFoundation)

    // 1. Create a preview layer with AVFoundation or a UIView, then add it to the view's sublayers.
    self.previewLayer!.frame = view.layer.bounds
    view.clipsToBounds = true
    view.layer.addSublayer(self.previewLayer!)

    // 2. Create and customize a scene view, then add it to the view's subviews.
    let sceneView = SCNView()
    sceneView.frame = view.bounds
    sceneView.backgroundColor = UIColor.clear
    self.previewLayer!.frame = view.bounds
    view.addSubview(sceneView)

    // 3. Create and customize a scene, then assign it to the scene view's scene.
    let scene = SCNScene()
    sceneView.autoenablesDefaultLighting = true
    sceneView.allowsCameraControl = true
    let boxGeometry = SCNBox(width: 800, height: 400, length: 1.0, chamferRadius: 1.0)
    let yellow = UIColor.yellow
    let semi = yellow.withAlphaComponent(0.3)
    boxGeometry.firstMaterial?.diffuse.contents = semi
    let boxNode = SCNNode(geometry: boxGeometry)
    scene.rootNode.addChildNode(boxNode)
    sceneView.scene = scene