動画を回転させて保存する – Swift3

環境

  • AVFoundation
  • Swift3
  • Xcode9

説明

1、動画を横に向けて撮影する

2、撮影した動画を選択する

3、アルバムから選択した動画を回転させて保存する

実装

//  ViewController.swift
//  Shoot movies Swift3
//
//  Created by ryosuke-hujisawa on 2017/10/06.
//  Copyright © 2017 ryosuke-hujisawa. All rights reserved.
//

import UIKit
import AVFoundation
import AssetsLibrary
import Photos

/// Records video with AVFoundation, lets the user pick a video from the photo
/// library, rotates it through an AVMutableVideoComposition, and saves the
/// exported result back to the photo library.
///
/// NOTE(review): written against Swift 3/4-era APIs (kCMTimeZero,
/// UIImagePickerControllerReferenceURL, AVAssetExportSessionStatus, ...);
/// several of these are deprecated or removed in later SDKs.
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    let imagePickerController = UIImagePickerController()
    // File URL of the video most recently picked from the photo library; set
    // asynchronously in imagePickerController(_:didFinishPickingMediaWithInfo:).
    var videoURL: URL?

    // Recording-state flag: true while the movie file output is recording.
    private var recording: Bool = false

    // Movie file output attached to the capture session.
    private var myVideoOutput: AVCaptureMovieFileOutput!

    // Preview layer showing the live camera feed.
    private var myVideoLayer: AVCaptureVideoPreviewLayer!

    // Start/stop recording button.
    private var button: UIButton!

    // Button that opens the photo-library picker.
    private var choiceAlbum: UIButton!

    // Button that rotates + exports the picked video and saves it.
    private var saveAlbum: UIButton!

    /// Builds the capture session (back camera + microphone + photo and movie
    /// outputs), attaches the live preview layer, starts the session, and lays
    /// out the three control buttons.
    override func viewDidLoad() {
        super.viewDidLoad()


        // Create the capture session.
        let session = AVCaptureSession()

        // Choose the highest session preset this device supports.
        if (session.canSetSessionPreset(AVCaptureSession.Preset.hd4K3840x2160)) {
            session.sessionPreset = AVCaptureSession.Preset.hd4K3840x2160
        } else if (session.canSetSessionPreset(AVCaptureSession.Preset.high)) {
            session.sessionPreset = AVCaptureSession.Preset.high
        } else if (session.canSetSessionPreset(AVCaptureSession.Preset.medium)) {
            session.sessionPreset = AVCaptureSession.Preset.medium
        } else if (session.canSetSessionPreset(AVCaptureSession.Preset.low)) {
            session.sessionPreset = AVCaptureSession.Preset.low
        }


        // Still-photo output (added to the session but otherwise unused here).
        let myImageOutput = AVCapturePhotoOutput()

        // Back wide-angle camera.
        // NOTE(review): the force-unwrap and try! below crash when no camera
        // is available (simulator) or camera permission is denied.
        let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)
        let videoInput = try! AVCaptureDeviceInput.init(device: camera!)

        // Add the camera input to the session.
        session.addInput(videoInput)

        // Microphone.
        // NOTE(review): same force-unwrap/try! crash risk as the camera above.
        let mic = AVCaptureDevice.default(.builtInMicrophone, for: AVMediaType.audio, position: .unspecified)
        let audioInput = try! AVCaptureDeviceInput.init(device: mic!)

        // Add the microphone input to the session.
        session.addInput(audioInput)

        // Add the still-photo output to the session.
        session.addOutput(myImageOutput)

        // Movie file output used for recording.
        myVideoOutput = AVCaptureMovieFileOutput()

        // Add the movie output to the session.
        session.addOutput(myVideoOutput)

        // Preview layer that displays the camera feed.
        myVideoLayer = AVCaptureVideoPreviewLayer.init(session: session)
        myVideoLayer?.frame = self.view.bounds
        myVideoLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill

        // Attach the preview layer to this view's layer.
        self.view.layer.addSublayer(myVideoLayer!)

        // Start the capture session.
        // NOTE(review): startRunning() blocks; Apple recommends calling it off
        // the main thread.
        session.startRunning()

        // Record start/stop button.
        button = UIButton(frame: CGRect(x: 0, y: 0, width: 120, height: 50))
        button.backgroundColor = .red
        button.layer.masksToBounds = true
        button.setTitle("START", for: .normal)
        button.layer.cornerRadius = 20.0
        button.layer.position = CGPoint(x: self.view.bounds.width/2, y:self.view.bounds.height-50)
        button.addTarget(self, action: #selector(ViewController.onTapButton), for: .touchUpInside)
        self.view.addSubview(button)


        // Photo-library picker button.
        choiceAlbum = UIButton(frame: CGRect(x: 0, y: 0, width: 120, height: 50))
        choiceAlbum.backgroundColor = .red
        choiceAlbum.layer.masksToBounds = true
        choiceAlbum.setTitle("choiceAlbum", for: .normal)
        choiceAlbum.layer.cornerRadius = 20.0
        choiceAlbum.layer.position = CGPoint(x: self.view.bounds.width-190, y:self.view.bounds.height-150)
        choiceAlbum.addTarget(self, action: #selector(ViewController.onTapButtonAlbum), for: .touchUpInside)
        self.view.addSubview(choiceAlbum)



        // Rotate-and-save button.
        saveAlbum = UIButton(frame: CGRect(x: 0, y: 0, width: 120, height: 50))
        saveAlbum.backgroundColor = .red
        saveAlbum.layer.masksToBounds = true
        saveAlbum.setTitle("saveAlbum", for: .normal)
        saveAlbum.layer.cornerRadius = 20.0
        saveAlbum.layer.position = CGPoint(x: self.view.bounds.width-190, y:self.view.bounds.height-250)
        saveAlbum.addTarget(self, action: #selector(ViewController.onTapButtonSave), for: .touchUpInside)
        self.view.addSubview(saveAlbum)
    }

    /// Rotates the previously picked video (`videoURL`) by -90 degrees via a
    /// video composition, exports it to Documents/test.mov, and saves the
    /// result to the photo library. Does nothing when no video was picked yet.
    @objc internal func onTapButtonSave(sender: UIButton){
        if let url = videoURL {
            // Build an asset from the picked video's URL.
            let videoAsset: AVURLAsset = AVURLAsset(url: url, options: nil)

            // Export destination: Documents/test.mov.
            let videoName: String = "test.mov"
            let documentPath: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
            let exportPath: String = documentPath + "/" + videoName
            let exportUrl: URL = URL(fileURLWithPath: exportPath)

            // Remove a stale export so AVAssetExportSession can write.
            // NOTE(review): try! crashes on a filesystem error.
            if FileManager.default.fileExists(atPath: exportPath) {
                try! FileManager.default.removeItem(atPath: exportPath)
            }

            // Base composition holding copies of the video and audio tracks.
            let mixComposition : AVMutableComposition = AVMutableComposition()
            let compositionVideoTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)!
            let compositionAudioTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)!

            // First video/audio tracks of the asset.
            // NOTE(review): [0] crashes if the asset has no audio track.
            let videoTrack: AVAssetTrack = videoAsset.tracks(withMediaType: AVMediaType.video)[0]
            let audioTrack: AVAssetTrack = videoAsset.tracks(withMediaType: AVMediaType.audio)[0]
            // Copy the full duration of both tracks into the composition.
            try! compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoTrack, at: kCMTimeZero)
            compositionVideoTrack.preferredTransform = videoTrack.preferredTransform

            try! compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: audioTrack, at: kCMTimeZero)

            // Watermark/logo layer (disabled).
//            let logoImage: UIImage = UIImage(named: "logologo.png")!
//            let logoLayer: CALayer = CALayer()
//            logoLayer.contents = logoImage.cgImage
//            logoLayer.frame = CGRect(x: 5, y: 25, width: 100, height: 100)
//            logoLayer.opacity = 0.9

            var videoSize: CGSize = videoTrack.naturalSize;
            var isPortrait: Bool = false

            // Infer orientation from the track's preferred transform and swap
            // width/height for portrait footage.
            // NOTE(review): isPortrait is set here but never read afterwards
            // in this method — only videoSize is used.
            let txf = videoTrack.preferredTransform
            if txf.tx == videoSize.width && txf.ty == videoSize.height {
                // landscape right
            } else if txf.tx == 0 && txf.ty == 0 {
                // landscape left
            } else if txf.tx == 0 && txf.ty == videoSize.width {
                // portrait upside down
                isPortrait = true
                videoSize = CGSize(width: videoSize.height, height: videoSize.width)
            } else  {
                // portrait
                isPortrait = true
                videoSize = CGSize(width: videoSize.height, height: videoSize.width)
            }

            // Parent layer tree used by the Core Animation tool.
            let parentLayer: CALayer = CALayer()
            let videoLayer: CALayer = CALayer()
            parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            parentLayer.addSublayer(videoLayer)
            //parentLayer.addSublayer(logoLayer)

            // Video composition that renders frames through the layer tree.
            let videoComp: AVMutableVideoComposition = AVMutableVideoComposition()
            videoComp.renderSize = videoSize
            videoComp.frameDuration = CMTimeMake(1, 30)
            videoComp.animationTool = AVVideoCompositionCoreAnimationTool.init(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

            // Single instruction spanning the whole composition.
            let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
            instruction.layerInstructions = [layerInstruction]

            // Rotate -90° on top of the track's own transform, then translate
            // and scale.
            // NOTE(review): -3000/-300 and 0.7 are magic numbers apparently
            // tuned for one specific recording size — verify for other inputs.
            let affine2 = CGAffineTransform(rotationAngle: CGFloat((-90.0 * Float.pi) / 180.0))
            layerInstruction.setTransform(videoTrack.preferredTransform.concatenating(affine2).translatedBy(x: -3000, y: -300).scaledBy(x: 0.7, y: 0.7), at: kCMTimeZero)

            // Attach the instruction to the video composition.
            videoComp.instructions = [instruction]

            // NOTE(review): exports the original videoAsset, not mixComposition
            // — inconsistent with fileOutput(_:didFinishRecordingTo:...) below,
            // which exports its mixComposition.
            let exportSession:AVAssetExportSession = AVAssetExportSession.init(asset: videoAsset, presetName: self.videoQuality())!
            exportSession.outputURL = exportUrl
            exportSession.videoComposition = videoComp
            exportSession.outputFileType = AVFileType.mov
            exportSession.shouldOptimizeForNetworkUse = true
            exportSession.exportAsynchronously(completionHandler: {() -> Void in
                if exportSession.status == AVAssetExportSessionStatus.failed {
                    print("failed \(exportSession.status) \(String(describing: exportSession.error))")
                }

                // Save the exported file to the photo library.
                // NOTE(review): runs even when the export failed above.
                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportUrl)
                }, completionHandler: {(success, err) -> Void in
                    var message = ""
                    if success {
                        message = "保存しました"
                    } else {
                        message = "保存に失敗しました"
                    }
                    // Show the result alert on the main thread.
                    DispatchQueue.main.async(execute: {
                        let alert = UIAlertController.init(title: "", message: message, preferredStyle: UIAlertControllerStyle.alert)
                        let action = UIAlertAction(title: "OK", style: UIAlertActionStyle.default){ (action: UIAlertAction) in
                            self.button.setTitle("START", for: .normal)
                            self.button.isEnabled = true
                            self.button.isHidden = false
                        }
                        alert.addAction(action)
                        self.present(alert, animated: true, completion: nil)
                    })
                })
            })
        }
    }

    /// Presents the photo-library picker configured for images and movies.
    @objc internal func onTapButtonAlbum(sender: UIButton){

        print("UIBarButtonItem。カメラロールから動画を選択")
        imagePickerController.sourceType = .photoLibrary
        imagePickerController.delegate = self
        imagePickerController.mediaTypes = ["public.image", "public.movie"]
        imagePickerController.videoQuality = UIImagePickerControllerQualityType.typeHigh
        present(imagePickerController, animated: true, completion: nil)
    }

    /// Picker delegate: resolves the picked item's PHAsset and stores its file
    /// URL in `videoURL` for the later rotate-and-save step.
    /// NOTE(review): requestAVAsset is asynchronous, so `videoURL` may still
    /// be nil for a short time after the picker is dismissed; tapping
    /// saveAlbum immediately can silently do nothing.
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {

        if let url = info[UIImagePickerControllerReferenceURL] as? NSURL {
            let refResult:PHFetchResult = PHAsset.fetchAssets(withALAssetURLs: [url.absoluteURL!], options: nil)
            let options: PHVideoRequestOptions = PHVideoRequestOptions()
            options.version = .original
            PHImageManager.default().requestAVAsset(forVideo: refResult[0], options: options, resultHandler: { (asset, audioMix, info) in
                if let urlAsset = asset as? AVURLAsset {
                    self.videoURL = urlAsset.url
                }
            })
        }

        // Dismiss the picker.
        picker.dismiss(animated: true, completion: nil)
    }


    /// Toggles recording: stops (and hides the button) when recording, or
    /// starts recording to Documents/test.mov otherwise.
    /// NOTE(review): every recording overwrites the same test.mov path.
    @objc internal func onTapButton(sender: UIButton){
        print("撮影!")
        if (self.recording) {
            // Stop recording; the delegate callback below handles the export.
            myVideoOutput.stopRecording()
            button.isEnabled = false
            button.isHidden = true
        } else {
            // Destination file for the new recording.
            let path: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
            let filePath: String = path + "/test.mov"
            let fileURL: URL = URL(fileURLWithPath: filePath)

            // Start recording.
            myVideoOutput.startRecording(to: fileURL, recordingDelegate: self)
            button.setTitle("STOP", for: .normal)
        }
        self.recording = !self.recording
    }

    /// AVCaptureFileOutputRecordingDelegate: called when recording finishes.
    /// Re-exports the recorded movie through a video composition (swapping
    /// width/height when the preview was portrait) and saves the result to
    /// the photo library, then shows an alert that re-enables the button.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {

        // Build an asset from the recorded file.
        let videoAsset: AVURLAsset = AVURLAsset(url: outputFileURL, options: nil)

        // Base composition holding copies of the video and audio tracks.
        let mixComposition : AVMutableComposition = AVMutableComposition()
        let compositionVideoTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)!
        let compositionAudioTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)!

        // First video/audio tracks of the asset.
        // NOTE(review): [0] crashes if the recording has no audio track.
        let videoTrack: AVAssetTrack = videoAsset.tracks(withMediaType: AVMediaType.video)[0]
        let audioTrack: AVAssetTrack = videoAsset.tracks(withMediaType: AVMediaType.audio)[0]

        // Copy the full duration of both tracks into the composition.
        try! compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoTrack, at: kCMTimeZero)
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform

        try! compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: audioTrack, at: kCMTimeZero)

        // Natural size of the recorded video.
        var videoSize: CGSize = videoTrack.naturalSize
        var isPortrait: Bool = false

        // Use the preview connection's orientation to decide portrait vs
        // landscape (unlike onTapButtonSave, which inspects the transform).
        if myVideoLayer.connection?.videoOrientation == .portrait {
            isPortrait = true
            videoSize = CGSize(width: videoSize.height, height: videoSize.width)
        }

//        // Watermark/logo layer (disabled).
//        let logoImage: UIImage = UIImage(named: "logologo.png")!
//        let logoLayer: CALayer = CALayer()
//        logoLayer.contents = logoImage.cgImage
//        logoLayer.frame = CGRect(x: 5, y: 25, width: 100, height: 100)
//        logoLayer.opacity = 0.9

        // Parent layer tree used by the Core Animation tool.
        let parentLayer: CALayer = CALayer()
        let videoLayer: CALayer = CALayer()
        parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        parentLayer.addSublayer(videoLayer)
        //parentLayer.addSublayer(logoLayer)

        // Video composition that renders frames through the layer tree.
        let videoComp: AVMutableVideoComposition = AVMutableVideoComposition()
        videoComp.renderSize = videoSize
        videoComp.frameDuration = CMTimeMake(1, 30)
        videoComp.animationTool = AVVideoCompositionCoreAnimationTool.init(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

        // Single instruction spanning the whole composition.
        let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
        let layerInstruction: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction.init(assetTrack: videoTrack)
        instruction.layerInstructions = [layerInstruction]

        // For portrait recordings, apply the track's preferred transform.
        // NOTE(review): despite the original comment ("rotate 90 degrees"),
        // the scale factor here is identity — only preferredTransform has any
        // effect, and nothing is applied at all in the landscape case.
        if isPortrait {
            let FirstAssetScaleFactor:CGAffineTransform = CGAffineTransform(scaleX: 1.0, y: 1.0)
            layerInstruction.setTransform(videoTrack.preferredTransform.concatenating(FirstAssetScaleFactor), at: kCMTimeZero)
        }

        // Attach the instruction to the video composition.
        videoComp.instructions = [instruction]

        // Export the composition (not the raw asset, unlike onTapButtonSave).
        let assetExport = AVAssetExportSession.init(asset: mixComposition, presetName: self.videoQuality())
        // Apply the rotation/layer composition.
        assetExport?.videoComposition = videoComp

        // Export destination: Documents/test.mov.
        // NOTE(review): this is the same path the recording was written to.
        let videoName: String = "test.mov"
        let documentPath: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
        let exportPath: String = documentPath + "/" + videoName
        let exportUrl: URL = URL(fileURLWithPath: exportPath)
        assetExport?.outputFileType = AVFileType.mov
        assetExport?.outputURL = exportUrl
        assetExport?.shouldOptimizeForNetworkUse = true

        // Remove the existing file so the export can write.
        // NOTE(review): try! crashes on a filesystem error.
        if FileManager.default.fileExists(atPath: exportPath) {
            try! FileManager.default.removeItem(atPath: exportPath)
        }

        // Run the export asynchronously.
        assetExport?.exportAsynchronously(completionHandler: {() -> Void in
            // Save the exported file to the photo library.
            // NOTE(review): export status is not checked here before saving.
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportUrl)
            }, completionHandler: {(success, err) -> Void in
                var message = ""
                if success {
                    message = "保存しました"
                } else {
                    message = "保存に失敗しました"
                }
                // Show the result alert on the main thread and restore the
                // record button when the user taps OK.
                DispatchQueue.main.async(execute: {
                    let alert = UIAlertController.init(title: "", message: message, preferredStyle: UIAlertControllerStyle.alert)
                    let action = UIAlertAction(title: "OK", style: UIAlertActionStyle.default){ (action: UIAlertAction) in
                        self.button.setTitle("START", for: .normal)
                        self.button.isEnabled = true
                        self.button.isHidden = false
                    }
                    alert.addAction(action)
                    self.present(alert, animated: true, completion: nil)
                })
            })
        })
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Maps the device's best-supported capture preset to a matching
    /// AVAssetExportSession preset name (4K → 3840x2160, high → highest,
    /// medium → medium, otherwise low).
    func videoQuality() -> String {
        let session = AVCaptureSession()
        var result = AVAssetExportPresetLowQuality
        // Probe a throwaway session for the presets this device supports.
        if (session.canSetSessionPreset(AVCaptureSession.Preset.hd4K3840x2160)) {
            result = AVAssetExportPreset3840x2160
        } else if (session.canSetSessionPreset(AVCaptureSession.Preset.high)) {
            result = AVAssetExportPresetHighestQuality
        } else if (session.canSetSessionPreset(AVCaptureSession.Preset.medium)) {
            result = AVAssetExportPresetMediumQuality
        } else if (session.canSetSessionPreset(AVCaptureSession.Preset.low)) {
            result = AVAssetExportPresetLowQuality
        }

        return result
    }
}

ソース

GitHub

藤沢瞭介(Ryosuke Hujisawa)
  • りょすけと申します。18歳からプログラミングをはじめ、今はフロントエンドでReactを書いたり、AIの勉強を頑張っています。off.tokyoでは、ハイテクやガジェット、それからプログラミングに関する情報まで、エンジニアに役立つ情報を日々発信しています!

未整理記事