Swift 2.1 Camera Application Demo

Swift 2.1でAVFoundationを使って最高画質で撮影できるサンプルを作ってみました。ググれば沢山出てくるのですが、Swiftのバージョンが古いものばかりで苦労したのでメモ代わりに記事にしておきます。

import UIKit
import AVFoundation


/// Demo view controller that shows a full-screen camera preview at the
/// highest photo quality and saves captured stills to the photo album.
class ViewController: UIViewController {

    // Input created from the back camera (set in avCamera()).
    var deviceInput: AVCaptureDeviceInput!
    // Still-image output used by takePhoto(_:).
    var stillImageOutput: AVCaptureStillImageOutput!
    // Capture session driving the preview layer and the photo output.
    var session: AVCaptureSession!
    // NOTE(review): the two properties below are never used anywhere in this
    // class; kept only so existing callers don't break. Candidates for removal.
    var captureSession: AVCaptureSession!
    var videoconnection: AVCaptureConnection!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        // Configure the camera when the app becomes active. avCamera() guards
        // internally against being run twice (the notification fires on every
        // foregrounding, not just the first).
        let notificationCenter = NSNotificationCenter.defaultCenter()
        notificationCenter.addObserver(
            self,
            selector: "avCamera",
            name: UIApplicationDidBecomeActiveNotification,
            object: nil)
    }

    deinit {
        // Fix: the observer registered in viewDidLoad was never removed,
        // leaving a dangling target after this controller is deallocated.
        NSNotificationCenter.defaultCenter().removeObserver(self)
    }

    /// Builds the capture session, preview layer and shutter button.
    /// - Returns: `true` on success (or if already configured), `false` when
    ///   no back camera is available or the device input cannot be created.
    func avCamera() -> Bool {
        // Fix: UIApplicationDidBecomeActiveNotification fires on every return
        // to the foreground; without this guard a fresh preview layer and a
        // fresh shutter button were stacked onto the view each time.
        if session != nil {
            return true
        }

        // Find the back camera among all capture devices.
        var captureDevice: AVCaptureDevice?
        for device in AVCaptureDevice.devices() {
            if let camera = device as? AVCaptureDevice
                where camera.position == AVCaptureDevicePosition.Back {
                captureDevice = camera
            }
        }

        guard let backCamera = captureDevice else {
            return false
        }

        // Fix: the original caught the error, printed it, and then went on to
        // call addInput with a nil implicitly-unwrapped input, crashing.
        do {
            deviceInput = try AVCaptureDeviceInput(device: backCamera)
        } catch let error as NSError {
            print(error)
            return false
        }

        stillImageOutput = AVCaptureStillImageOutput()

        // Configure the session for maximum photo quality.
        session = AVCaptureSession()
        session.sessionPreset = AVCaptureSessionPresetPhoto // 最高画質に設定
        if session.canAddInput(deviceInput) {
            session.addInput(deviceInput)
        }
        if session.canAddOutput(stillImageOutput) {
            session.addOutput(stillImageOutput)
        }

        // Full-screen preview layer.
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = view.bounds
        view.layer.addSublayer(previewLayer)

        session.startRunning()

        // Shutter button pinned near the bottom center of the view.
        let shutterButton = UIButton(frame: CGRectMake(0, 0, 120, 50))
        shutterButton.backgroundColor = UIColor.redColor()
        shutterButton.layer.masksToBounds = true
        shutterButton.setTitle("Shutter", forState: .Normal)
        shutterButton.layer.cornerRadius = 20.0
        shutterButton.layer.position = CGPoint(x: view.bounds.width / 2, y: view.bounds.height - 50)
        shutterButton.addTarget(self, action: "takePhoto:", forControlEvents: .TouchUpInside)
        view.addSubview(shutterButton)

        return true
    }

    /// Shutter-button action: captures a still image and saves it to the
    /// saved-photos album.
    func takePhoto(sender: UIButton) {
        // Connect to the still-image output's video connection.
        let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo)

        stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection) {
            (imageDataBuffer, error) -> Void in

            // Fix: the original force-unwrapped the sample buffer and the
            // UIImage, crashing whenever the capture failed or was cancelled.
            guard let buffer = imageDataBuffer where error == nil else {
                return
            }

            // Convert the captured sample buffer to JPEG data, then to UIImage.
            guard let jpegData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer),
                image = UIImage(data: jpegData) else {
                return
            }

            // Save to the photo album.
            UIImageWriteToSavedPhotosAlbum(image, self, nil, nil)
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

}