KRUBERLICK
4/10/2017 - 12:59 PM

Media capture tools

Three RxSwift-based helpers for camera work: a post-processor that square-crops and re-encodes recorded video to MP4, an AVFoundation capture session manager, and a CoreMotion-based device orientation tracker.

import AVFoundation
import SDAVAssetExportSession
import RxSwift

class MediaPostProcessor {
    func cropVideoToSquareAndEncodeToMP4(from videoURL: URL, 
                                         to outputURL: URL, 
                                         videoOrientation: AVCaptureVideoOrientation,
                                         squareSize: CGFloat) -> Observable<Bool> {
        return Observable.create { observer in
            let asset = AVURLAsset(url: videoURL)
            guard let sourceVideoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first,
                let sourceAudioTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first else {
                    observer.onError(NSError())
                    return Disposables.create()
            }
            let mixComposition = AVMutableComposition()
            let videoTrack = mixComposition.addMutableTrack(
                withMediaType: AVMediaTypeVideo, 
                preferredTrackID: kCMPersistentTrackID_Invalid
            )
            let audioTrack = mixComposition.addMutableTrack(
                withMediaType: AVMediaTypeAudio, 
                preferredTrackID: kCMPersistentTrackID_Invalid
            )

            do {
                try videoTrack.insertTimeRange(
                    CMTimeRange(
                        start: kCMTimeZero, 
                        duration: asset.duration
                    ),
                    of: sourceVideoTrack, 
                    at: kCMTimeZero
                )
                try audioTrack.insertTimeRange(
                    CMTimeRange(
                        start: kCMTimeZero, 
                        duration: asset.duration
                    ),
                    of: sourceAudioTrack, 
                    at: kCMTimeZero
                )

                let instruction = AVMutableVideoCompositionInstruction()

                instruction.timeRange = CMTimeRange(
                    start: kCMTimeZero, 
                    duration: asset.duration
                )

                let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
                let sourceSize = sourceVideoTrack.naturalSize
                let scaleFactor: CGFloat = squareSize / sourceSize.height
                var rotation: CGAffineTransform
                var translation: CGAffineTransform

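                // Each case below rotates the source upright, then shifts it so
                // the centered square (side = sourceSize.height) fills the render
                // area; `scale` finally maps that square to `squareSize`.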
                switch videoOrientation {
                case .portrait:
                    rotation = CGAffineTransform(rotationAngle: CGFloat.pi / 2)
                    translation = CGAffineTransform(
                        translationX: sourceSize.height, 
                        y: -(sourceSize.width - sourceSize.height) / 2
                    )
                case .landscapeRight:
                    rotation = CGAffineTransform.identity
                    translation = CGAffineTransform(
                        translationX: -(sourceSize.width - sourceSize.height) / 2, 
                        y: 0
                    )
                case .landscapeLeft:
                    rotation = CGAffineTransform(rotationAngle: CGFloat.pi)
                    translation = CGAffineTransform(
                        translationX: sourceSize.width - (sourceSize.width - sourceSize.height) / 2, 
                        y: sourceSize.height
                    )
                case .portraitUpsideDown:
                    rotation = CGAffineTransform(rotationAngle: 3 * CGFloat.pi / 2)
                    translation = CGAffineTransform(
                        translationX: 0, 
                        y: sourceSize.height + (sourceSize.width - sourceSize.height) / 2
                    )
                }

                let scale = CGAffineTransform(scaleX: scaleFactor, y: scaleFactor)
                let transform = rotation.concatenating(translation).concatenating(scale)

                transformer.setTransform(transform, at: kCMTimeZero)
                instruction.layerInstructions = [transformer]

                let videoComposition = AVMutableVideoComposition()

                videoComposition.frameDuration = CMTimeMake(1, 60)
                videoComposition.renderSize = CGSize(width: squareSize, height: squareSize)
                videoComposition.instructions = [instruction]

                // Export the composition that carries the cropped tracks,
                // not the original source asset.
                guard let exportSession = SDAVAssetExportSession(asset: mixComposition) else {
                    observer.onError(NSError())
                    return Disposables.create()
                }

                exportSession.outputURL = outputURL
                exportSession.outputFileType = AVFileTypeMPEG4
                exportSession.shouldOptimizeForNetworkUse = true
                exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
                exportSession.videoComposition = videoComposition
                exportSession.videoSettings = [AVVideoCodecKey: AVVideoCodecH264,
                                               AVVideoWidthKey: squareSize,
                                               AVVideoHeightKey: squareSize,
                                               AVVideoCompressionPropertiesKey: [
                                                AVVideoAverageBitRateKey: 3000000,
                                                AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel]]
                exportSession.audioSettings = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                               AVNumberOfChannelsKey: 2,
                                               AVSampleRateKey: 44100,
                                               AVEncoderBitRateKey: 128000]
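                // ~3 Mbps H.264 video plus 128 kbps stereo AAC: a reasonable
                // size/quality trade-off for short square clips.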
                exportSession.exportAsynchronously(completionHandler: {
                    switch exportSession.status {
                    case .failed, .cancelled:
                        observer.onError(exportSession.error ?? NSError())
                    default:
                        observer.onNext(true)
                        observer.onCompleted()
                    }
                })
            }
            catch let error {
                observer.onError(error)
            }
            return Disposables.create()
        }
    }
}
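
For reference, a minimal usage sketch. The file URLs, orientation, and square size below are placeholders, and error handling is reduced to prints:

let postProcessor = MediaPostProcessor()
let recordedURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("raw.mov")
let exportURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("square.mp4")
let disposeBag = DisposeBag()

postProcessor
    .cropVideoToSquareAndEncodeToMP4(from: recordedURL,
                                     to: exportURL,
                                     videoOrientation: .portrait,
                                     squareSize: 480)
    .observeOn(MainScheduler.instance)
    .subscribe(onNext: { _ in
        print("Exported square MP4 to \(exportURL)")
    }, onError: { error in
        print("Export failed: \(error)")
    })
    .addDisposableTo(disposeBag)
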
import UIKit
import AVFoundation
import RxSwift

class CaptureSessionManager: NSObject {

    // MARK: - ENUMERATIONS
    enum CaptureMode {
        case photo
        case video
    }

    enum VideoRecordingStatus {
        case recording
        case stopped
    }

    // MARK: - PRIVATE PROPERTIES
    private var captureMode: CaptureMode
    private let sessionQueue = DispatchQueue(label: "com.agakat.Agakat2.CaptureSessionManagerQueue")
    private var captureSession: AVCaptureSession!
    private var previewLayer: AVCaptureVideoPreviewLayer!
    private var previewLayerSuperlayer: CALayer
    private var cameraDevice: AVCaptureDevice!
    private var cameraPosition: AVCaptureDevicePosition
    private var cameraFlashMode: AVCaptureFlashMode = .off
    private var cameraTorchMode: AVCaptureTorchMode = .off
    private var activeInput: AVCaptureDeviceInput!
    private var stillImageOutput: AVCaptureStillImageOutput!
    private var movieFileOutput: AVCaptureMovieFileOutput!
    private let previewLayerFrameUpdateDisposeBag = DisposeBag()
    private var cameraObserversDisposeBag = DisposeBag()

    // MARK: - PUBLIC PROPERTIES
    let videoRecordingStatusNotifier = PublishSubject<VideoRecordingStatus>()
    let cameraFocusAdjustingNotifier = PublishSubject<Bool>()
    let cameraRampingVideoZoomNotifier = PublishSubject<Bool>()
    let cameraIsFocusPointFixed = Variable<Bool>(false)

    // MARK: - INITIALIZATION
    init(mode: CaptureMode,
         cameraPosition: AVCaptureDevicePosition,
         previewLayerSuperlayer: CALayer) {
        self.captureMode = mode
        self.cameraPosition = cameraPosition
        self.previewLayerSuperlayer = previewLayerSuperlayer
        super.init()
        self.previewLayerSuperlayer.rx.observe(CGRect.self, "bounds")
            .observeOn(MainScheduler.instance)
            .subscribe(onNext: { [weak self] bounds in
                if let bounds = bounds,
                    let previewLayer = self?.previewLayer {
                    previewLayer.frame = bounds
                }
            })
            .addDisposableTo(self.previewLayerFrameUpdateDisposeBag)
        NotificationCenter.default.addObserver(
            self, 
            selector: #selector(CaptureSessionManager.resetFocus), 
            name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, 
            object: nil
        )
    }

    // MARK: - HELPER METHODS
    private func execute(onSessionQueue: Bool, statements: @escaping () -> ()) {
        if onSessionQueue {
            self.sessionQueue.async {
                statements()
            }
        }
        else {
            statements()
        }
    }

    // MARK: - CAMERA OBSERVERS SETUP
    private func setupCameraObservers() {
        self.cameraObserversDisposeBag = DisposeBag()
        (self.cameraDevice as NSObject).rx.observe(Bool.self, "adjustingFocus")
            .subscribe(onNext: { [weak self] value in
                guard let value = value else {
                    return
                }
                self?.cameraFocusAdjustingNotifier.onNext(value)
            })
            .addDisposableTo(self.cameraObserversDisposeBag)
        (self.cameraDevice as NSObject).rx.observe(Bool.self, "rampingVideoZoom")
            .subscribe(onNext: { [weak self] value in
                guard let value = value else {
                    return
                }
                self?.cameraRampingVideoZoomNotifier.onNext(value)
            })
            .addDisposableTo(self.cameraObserversDisposeBag)
    }

    // MARK: - CAMERA FLASH/TORCH MODE
    func setFlashMode(_ flashMode: AVCaptureFlashMode,
                      onSessionQueue: Bool = true) {
        guard self.cameraDevice != nil else {
            fatalError("Camera device not configured")
        }
        if self.cameraDevice.hasFlash
            && self.cameraDevice.isFlashModeSupported(flashMode) {
            self.execute(onSessionQueue: onSessionQueue, statements: {
                do {
                    try self.cameraDevice.lockForConfiguration()
                    if self.cameraDevice.hasTorch
                        && self.cameraDevice.isTorchModeSupported(.off) {
                        self.cameraDevice.torchMode = .off
                    }
                    self.cameraDevice.flashMode = flashMode
                    self.cameraDevice.unlockForConfiguration()
                }
                catch let error {
                    fatalError("\(error)")
                }
            })
        }
    }

    func setTorchMode(_ torchMode: AVCaptureTorchMode,
                      onSessionQueue: Bool = true) {
        guard self.cameraDevice != nil else {
            fatalError("Camera device not configured")
        }
        if self.cameraDevice.hasTorch
            && self.cameraDevice.isTorchModeSupported(torchMode) {
            self.execute(onSessionQueue: onSessionQueue, statements: {
                do {
                    try self.cameraDevice.lockForConfiguration()
                    if self.cameraDevice.hasFlash &&
                        self.cameraDevice.isFlashModeSupported(.off) {
                        self.cameraDevice.flashMode = .off
                    }
                    self.cameraDevice.torchMode = torchMode
                    self.cameraDevice.unlockForConfiguration()
                }
                catch let error {
                    fatalError("\(error)")
                }
            })
        }
    }

    func isFlashModeSupported(_ flashMode: AVCaptureFlashMode) -> Bool {
        guard self.cameraDevice != nil else {
            fatalError("Camera device not configured")
        }
        guard self.cameraDevice.isFlashAvailable else {
            return false
        }
        return self.cameraDevice.isFlashModeSupported(flashMode)
    }

    func isTorchModeSupported(_ torchMode: AVCaptureTorchMode) -> Bool {
        guard self.cameraDevice != nil else {
            fatalError("Camera device not configured")
        }
        guard self.cameraDevice.isTorchAvailable else {
            return false
        }
        return self.cameraDevice.isTorchModeSupported(torchMode)
    }

    func currentFlashMode() -> AVCaptureFlashMode {
        return self.cameraDevice.flashMode
    }

    func currentTorchMode() -> AVCaptureTorchMode {
        return self.cameraDevice.torchMode
    }

    // MARK: - CAMERA FOCUS/EXPOSURE SETUP
    func setCameraFocus(at point: CGPoint, continuous: Bool) {
        guard self.cameraDevice != nil else {
            return
        }
        self.execute(onSessionQueue: true) {
            guard !self.cameraDevice.isRampingVideoZoom else {
                return
            }
            do {
                try self.cameraDevice.lockForConfiguration()
                if self.cameraDevice.isFocusPointOfInterestSupported {
                    self.cameraDevice.focusPointOfInterest = point
                }
                if self.cameraDevice.isFocusModeSupported(.continuousAutoFocus) {
                    self.cameraDevice.focusMode = .continuousAutoFocus
                }
                else if self.cameraDevice.isFocusModeSupported(.autoFocus) {
                    self.cameraDevice.focusMode = .autoFocus
                }
                if self.cameraDevice.isExposurePointOfInterestSupported {
                    self.cameraDevice.exposurePointOfInterest = point
                }
                if self.cameraDevice.isExposureModeSupported(.continuousAutoExposure) {
                    self.cameraDevice.exposureMode = .continuousAutoExposure
                }
                else if self.cameraDevice.isExposureModeSupported(.autoExpose) {
                    self.cameraDevice.exposureMode = .autoExpose
                }
                if self.cameraDevice.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) {
                    self.cameraDevice.whiteBalanceMode = .continuousAutoWhiteBalance
                }
                else if self.cameraDevice.isWhiteBalanceModeSupported(.autoWhiteBalance) {
                    self.cameraDevice.whiteBalanceMode = .autoWhiteBalance
                }
                self.cameraIsFocusPointFixed.value = !continuous
                    && (self.cameraDevice.isFocusPointOfInterestSupported
                    || self.cameraDevice.isExposurePointOfInterestSupported)
                // Release the configuration lock acquired above.
                self.cameraDevice.unlockForConfiguration()
            }
            catch {}
        }
    }

    func resetFocus() {
        guard self.cameraDevice != nil else {
            return
        }
        if !self.cameraIsFocusPointFixed.value {
            return
        }
        self.setCameraFocus(at: CGPoint(x: 0.5, y: 0.5), continuous: true)
    }
    
    // MARK: - ZOOM RAMPING
    func rampCameraZoom(to videoZoomFactor: CGFloat, withRate rate: CGFloat) {
        guard self.cameraDevice != nil else {
            fatalError("Camera device not configured")
        }
        do {
            try self.cameraDevice.lockForConfiguration()
            self.cameraDevice.ramp(toVideoZoomFactor: videoZoomFactor, withRate: rate)
            self.cameraDevice.unlockForConfiguration()
        }
        catch let error {
            fatalError("\(error)")
        }
    }

    // MARK: - CAPTURE SESSION MANAGING
    func setupCaptureSession(onSessionQueue: Bool = true) -> Observable<Bool> {
        return Observable.create { observer in
            self.execute(onSessionQueue: onSessionQueue) {
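                // Build a fresh session: preview layer, camera + microphone
                // inputs, still-image and movie-file outputs, then start running.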
                self.captureSession = AVCaptureSession()
                if self.previewLayer != nil {
                    DispatchQueue.main.async {
                        self.previewLayer.removeFromSuperlayer()
                    }
                }
                self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
                self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                DispatchQueue.main.async {
                    self.previewLayer.frame = self.previewLayerSuperlayer.bounds
                    self.previewLayerSuperlayer.insertSublayer(self.previewLayer, at: 0)
                }
                self.captureSession.beginConfiguration()
                self.captureSession.sessionPreset = self.captureMode == .photo
                    ? AVCaptureSessionPresetPhoto
                    : AVCaptureSessionPresetHigh

                let videoDevices = AVCaptureDevice
                    .devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]

                for device in videoDevices {
                    if device.position == self.cameraPosition {
                        self.cameraDevice = device
                        break
                    }
                }
                guard self.cameraDevice != nil else {
                    observer.onError(NSError())
                    return
                }
                do {
                    try self.cameraDevice.lockForConfiguration()
                    self.cameraDevice.videoZoomFactor = 1
                    self.cameraDevice.isSubjectAreaChangeMonitoringEnabled = true
                    if self.cameraDevice.isLowLightBoostSupported {
                        self.cameraDevice.automaticallyEnablesLowLightBoostWhenAvailable = true
                    }
                    self.cameraDevice.unlockForConfiguration()
                    self.setupCameraObservers()
                }
                catch let error {
                    observer.onError(error)
                    return
                }

                guard let videoInput = try? AVCaptureDeviceInput(device: self.cameraDevice) else {
                    observer.onError(NSError())
                    return
                }

                self.activeInput = videoInput

                guard let microphoneDevice = AVCaptureDevice
                    .defaultDevice(withMediaType: AVMediaTypeAudio),
                    let micInput = try? AVCaptureDeviceInput(device: microphoneDevice) else {
                        observer.onError(NSError())
                        return
                }

                if self.captureSession.canAddInput(videoInput)
                    && self.captureSession.canAddInput(micInput) {
                    self.captureSession.addInput(videoInput)
                    self.captureSession.addInput(micInput)
                }
                else {
                    observer.onError(NSError())
                    return
                }
                self.stillImageOutput = AVCaptureStillImageOutput()
                self.movieFileOutput = AVCaptureMovieFileOutput()
                if self.captureSession.canAddOutput(self.stillImageOutput)
                    && self.captureSession.canAddOutput(self.movieFileOutput) {
                    self.captureSession.addOutput(self.movieFileOutput)
                    self.captureSession.addOutput(self.stillImageOutput)
                }
                else {
                    observer.onError(NSError())
                    return
                }

                if let movieFileOutputConnection = self.movieFileOutput
                    .connection(withMediaType: AVMediaTypeVideo) {
                    if movieFileOutputConnection.isVideoStabilizationSupported {
                        movieFileOutputConnection.preferredVideoStabilizationMode = .cinematic
                    }
                }

                self.captureSession.commitConfiguration()
                self.captureSession.startRunning()
                observer.onNext(true)
                observer.onCompleted()
            }
            return Disposables.create()
        }
    }

    func teardownCapture(onSessionQueue: Bool = true) -> Observable<Bool> {
        return Observable.create { observer in
            self.execute(onSessionQueue: onSessionQueue) {
                guard self.captureSession != nil else {
                    observer.onError(NSError())
                    return
                }
                if self.movieFileOutput != nil && self.movieFileOutput.isRecording {
                    self.movieFileOutput.stopRecording()
                }
                if self.captureSession.isRunning {
                    self.captureSession.stopRunning()
                }
                if self.previewLayer != nil {
                    DispatchQueue.main.async {
                        self.previewLayer.removeFromSuperlayer()
                    }
                }
                self.captureSession = nil
                self.cameraDevice = nil
                observer.onNext(true)
                observer.onCompleted()
            }
            return Disposables.create()
        }
    }

    func pauseCapture(onSessionQueue: Bool = true) -> Observable<Bool> {
        return Observable.create { observer in
            self.execute(onSessionQueue: onSessionQueue) {
                guard self.captureSession != nil else {
                    observer.onError(NSError())
                    return
                }
                self.captureSession.stopRunning()
                observer.onNext(true)
                observer.onCompleted()
            }
            return Disposables.create()
        }
    }

    func resumeCapture(onSessionQueue: Bool = true) -> Observable<Bool> {
        return Observable.create { observer in
            self.execute(onSessionQueue: onSessionQueue) {
                guard self.captureSession != nil else {
                    observer.onError(NSError())
                    return
                }
                self.captureSession.startRunning()
                observer.onNext(true)
                observer.onCompleted()
            }
            return Disposables.create()
        }
    }

    func switchCaptureMode(to newCaptureMode: CaptureMode,
                           onSessionQueue: Bool = true) -> Observable<Bool> {
        return Observable.create { observer in
            self.execute(onSessionQueue: onSessionQueue) {
                guard self.captureSession != nil else {
                    observer.onError(NSError())
                    return
                }
                self.setFlashMode(.off, onSessionQueue: false)
                self.setTorchMode(.off, onSessionQueue: false)
                self.captureSession.beginConfiguration()
                switch newCaptureMode {
                case .photo:
                    self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto
                case .video:
                    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh
                }
                self.captureSession.commitConfiguration()
                // Keep the stored mode in sync with the active preset.
                self.captureMode = newCaptureMode
                observer.onNext(true)
                observer.onCompleted()
            }
            return Disposables.create()
        }
    }

    // MARK: - CAMERA SWITCHING
    func switchCameraPosition(to newPosition: AVCaptureDevicePosition,
                              onSessionQueue: Bool = true) -> Observable<Bool> {
        return Observable.create { observer in
            self.execute(onSessionQueue: onSessionQueue) {
                guard self.cameraDevice != nil else {
                    observer.onError(NSError())
                    return
                }
                do {
                    let videoDevices = AVCaptureDevice
                        .devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]

                    for device in videoDevices {
                        if device.position == newPosition {
                            self.cameraDevice = device
                            break
                        }
                    }
                    // Keep the stored position in sync and re-register the
                    // focus/zoom observers for the newly selected device.
                    self.cameraPosition = newPosition
                    self.setupCameraObservers()
                    self.captureSession.removeInput(self.activeInput)
                    self.activeInput = try AVCaptureDeviceInput(device: self.cameraDevice)
                    if self.captureSession.canAddInput(self.activeInput) {
                        self.captureSession.addInput(self.activeInput)
                    }
                    else {
                        observer.onError(NSError())
                        return
                    }
                    observer.onNext(true)
                    observer.onCompleted()
                }
                catch let error {
                    observer.onError(error)
                }
            }
            return Disposables.create()
        }
    }

    // MARK: - PHOTO/VIDEO CAPTURE
    func capturePhoto() -> Observable<UIImage> {
        return Observable.create { observer in
            guard self.stillImageOutput != nil else {
                observer.onError(NSError())
                return Disposables.create()
            }

            guard let connection = self.stillImageOutput
                .connection(withMediaType: AVMediaTypeVideo) else {
                    observer.onError(NSError())
                    return Disposables.create()
            }

            self.stillImageOutput.captureStillImageAsynchronously(
                from: connection,
                completionHandler: { sampleBuffer, error in
                    if let error = error {
                        observer.onError(error)
                        return
                    }
                    self.execute(onSessionQueue: true) {
                        guard let imageData = AVCaptureStillImageOutput
                            .jpegStillImageNSDataRepresentation(sampleBuffer),
                            let image = UIImage(data: imageData) else {
                                observer.onError(NSError())
                                return
                        }

                        observer.onNext(image)
                        observer.onCompleted()
                    }
            })
            return Disposables.create()
        }
    }

    func startCapturingVideo(to outputURL: URL) -> Observable<Bool> {
        return Observable.create { observer in
            guard self.movieFileOutput != nil else {
                observer.onError(NSError())
                return Disposables.create()
            }
            self.execute(onSessionQueue: true) {
                self.movieFileOutput.startRecording(
                    toOutputFileURL: outputURL,
                    recordingDelegate: self
                )
                observer.onNext(true)
                observer.onCompleted()
            }
            return Disposables.create()
        }
    }

    func stopCapturingVideo() -> Observable<Bool> {
        return Observable.create { observer in
            guard self.movieFileOutput != nil else {
                observer.onError(NSError())
                return Disposables.create()
            }
            self.execute(onSessionQueue: true) {
                self.movieFileOutput.stopRecording()
                observer.onNext(true)
                observer.onCompleted()
            }
            return Disposables.create()
        }
    }
}

// MARK: - AVCaptureFileOutputRecordingDelegate
extension CaptureSessionManager: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!,
                 didStartRecordingToOutputFileAt fileURL: URL!,
                 fromConnections connections: [Any]!) {
        self.videoRecordingStatusNotifier.onNext(.recording)
    }

    func capture(_ captureOutput: AVCaptureFileOutput!,
                 didFinishRecordingToOutputFileAt outputFileURL: URL!,
                 fromConnections connections: [Any]!,
                 error: Error!) {
        self.videoRecordingStatusNotifier.onNext(.stopped)
    }
}
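
A sketch of how the manager might be driven, e.g. from a view controller. previewView is an assumed UIView hosting the preview; chaining setup straight into a photo capture is purely illustrative:

let sessionManager = CaptureSessionManager(mode: .photo,
                                           cameraPosition: .back,
                                           previewLayerSuperlayer: previewView.layer)
let disposeBag = DisposeBag()

sessionManager.setupCaptureSession()
    .flatMap { _ in sessionManager.capturePhoto() }
    .observeOn(MainScheduler.instance)
    .subscribe(onNext: { image in
        print("Captured photo: \(image.size)")
    }, onError: { error in
        print("Capture failed: \(error)")
    })
    .addDisposableTo(disposeBag)
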
import UIKit
import CoreMotion
import AVFoundation
import RxSwift

class CaptureDeviceOrientationManager {
    // MARK: - PRIVATE PROPERTIES
    private let motionManager = CMMotionManager()
    private let operationQueue = OperationQueue()

    // MARK: - PUBLIC PROPERTIES
    var isDeviceMotionActive: Bool {
        return self.motionManager.isDeviceMotionActive
    }

    let avOrientationChangeNotifier = PublishSubject<AVCaptureVideoOrientation>()
    let deviceOrientationChangeNotifier = PublishSubject<UIDeviceOrientation>()

    // MARK: - INITIALIZATION
    init() {
        self.resumeMotionUpdates()
    }

    // MARK: - MOTION UPDATES START/STOP
    func pauseMotionUpdates() {
        self.motionManager.stopDeviceMotionUpdates()
    }

    func resumeMotionUpdates() {
        self.motionManager
            .startDeviceMotionUpdates(using: .xArbitraryZVertical,
                                      to: self.operationQueue) { [unowned self] motion, error in
                                        if let error = error {
                                            self.avOrientationChangeNotifier.onError(error)
                                            self.deviceOrientationChangeNotifier.onError(error)
                                            return
                                        }

                                        guard let attitude = motion?.attitude else {
                                            return
                                        }

                                        self.avOrientationChangeNotifier.onNext(
                                            self.avOrientation(
                                                for: self.deviceOrientation(
                                                    fromAttitude: attitude
                                                )
                                            )
                                        )
                                        self.deviceOrientationChangeNotifier
                                            .onNext(self.deviceOrientation(fromAttitude: attitude))
        }
    }

    // MARK: - CURRENT DEVICE/AV ORIENTATION GETTERS
    func getCurrentDeviceOrientation() -> UIDeviceOrientation {
        if let attitude = self.motionManager.deviceMotion?.attitude {
            return self.deviceOrientation(fromAttitude: attitude)
        }
        return .portrait
    }

    func getCurrentAVOrientation() -> AVCaptureVideoOrientation {
        return self.avOrientation(for: self.getCurrentDeviceOrientation())
    }

    // MARK: - HELPER METHODS
    private func deviceOrientation(fromAttitude attitude: CMAttitude) -> UIDeviceOrientation {
        let roll = radiansToDegrees(attitude.roll)
        let pitch = radiansToDegrees(attitude.pitch)
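        // Quantize an angle into four 90° sectors: 0 ≈ 0°, 1 ≈ +90°,
        // 2 ≈ ±180°, 3 ≈ -90°, using ±29°/±151° as the sector boundaries.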
        let calcQuarter: (Double) -> Int = { (angle) in
            if angle > -29 && angle <= 29 {
                return 0
            }
            else if angle > 29 && angle <= 151 {
                return 1
            }
            else if angle > 151 || angle <= -151 {
                return 2
            }
            else if angle > -151 && angle <= -29 {
                return 3
            }
            else {
                return -1
            }
        }
        let rollQuarter = calcQuarter(roll)
        let pitchQuarter = calcQuarter(pitch)
        let orientation: UIDeviceOrientation

        switch (rollQuarter, pitchQuarter) {
        case (0, 0), (2, 2):
            orientation = .faceUp
        case (0, 2), (2, 0):
            orientation = .faceDown
        case (_, 1):
            orientation = .portrait
        case (_, 3):
            orientation = .portraitUpsideDown
        case (1, 0), (3, 2):
            orientation = .landscapeRight
        case (3, 0), (1, 2):
            orientation = .landscapeLeft
        default:
            orientation = .unknown
        }
        return orientation
    }

    private func avOrientation(for deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation {
        switch deviceOrientation {
        case .landscapeLeft:
            return .landscapeRight
        case .landscapeRight:
            return .landscapeLeft
        case .faceUp, .faceDown, .unknown:
            return .portrait
        default:
            return AVCaptureVideoOrientation(rawValue: deviceOrientation.rawValue)!
        }
    }
}
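
Finally, a sketch tying the orientation manager into the capture flow; the subscription wiring is illustrative:

let orientationManager = CaptureDeviceOrientationManager()
let disposeBag = DisposeBag()

orientationManager.avOrientationChangeNotifier
    .observeOn(MainScheduler.instance)
    .subscribe(onNext: { orientation in
        // The latest AVCaptureVideoOrientation can be handed to
        // cropVideoToSquareAndEncodeToMP4(from:to:videoOrientation:squareSize:)
        // once a recording finishes.
        print("AV orientation changed: \(orientation.rawValue)")
    })
    .addDisposableTo(disposeBag)

// Or take a one-off snapshot, e.g. right before starting a recording:
let orientationNow = orientationManager.getCurrentAVOrientation()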