I am having an issue compressing my recorded video. Recording starts without any problems, but the compression fails 100% of the time with no error attached. I was hoping a more experienced developer could point out possible causes, as I have never worked with compression before.
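The only extra diagnostic I can think of, although I have not wired it in yet, is reading the export session's error property once the export finishes. A minimal sketch of what I mean (logExportResult is just a placeholder name, and session would be the AVAssetExportSession my compressVideo handler receives):

import AVFoundation

// Sketch only: log whatever the export session can report about its outcome.
func logExportResult(session: AVAssetExportSession) {
    switch session.status {
    case .Completed:
        print("export completed to \(session.outputURL)")
    case .Failed:
        // error should carry the underlying NSError explaining the failure
        print("export failed: \(session.error)")
    case .Cancelled:
        print("export cancelled")
    default:
        print("export ended in state \(session.status.rawValue)")
    }
}

If that error also comes back nil, I am not sure where else to look. The full code is below.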
//MARK: - Variable Declaration
@IBOutlet weak var vwPreviewLayer: UIView!
@IBOutlet weak var vwStartStopRecording: UIView!
var captureSession : AVCaptureSession?
var videoOutput : AVCaptureMovieFileOutput?
var previewLayer : AVCaptureVideoPreviewLayer?
var lpgr : UILongPressGestureRecognizer?
var captureDevice : AVCaptureDevice?
var recordDelegate : AVCaptureFileOutputRecordingDelegate?
//MARK: - View Life Cycle
override func viewDidLoad() {
super.viewDidLoad()
vwStartStopRecording.layer.cornerRadius = self.vwStartStopRecording.frame.size.width / 2
vwStartStopRecording.clipsToBounds = true
lpgr = UILongPressGestureRecognizer(target: self, action: #selector(self.action(_:)))
lpgr!.minimumPressDuration = 0.75
vwStartStopRecording.addGestureRecognizer(lpgr!)
beginSession()
}
override func viewDidAppear(animated: Bool) {
super.viewDidAppear(animated)
previewLayer?.frame = vwPreviewLayer.bounds
}
//MARK: - BEGIN SESSION
func beginSession() {
captureSession = AVCaptureSession()
captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080
captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
// let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
// captureDevice! = backCamera!
var input : AVCaptureDeviceInput?
do {
input = try AVCaptureDeviceInput(device: captureDevice)
} catch let error as NSError {
print(error)
return // Stop rest of code
}
let audioCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
let audioInput : AVCaptureDeviceInput?
do {
audioInput = try AVCaptureDeviceInput(device: audioCaptureDevice)
} catch let error as NSError {
print(error)
return // Stop rest of code
}
if (captureSession?.canAddInput(audioInput))!{
captureSession?.addInput(audioInput)
}
if (captureSession?.canAddInput(input))!{
captureSession?.addInput(input)
videoOutput = AVCaptureMovieFileOutput()
//videoOutput?.outputSettings = [AVVideoCodecKey : AVVideoCodecKey]
if captureSession?.canAddOutput(videoOutput) == true {
captureSession?.addOutput(videoOutput)
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
vwPreviewLayer.layer.addSublayer(previewLayer!)
captureSession?.startRunning()
}
}
}
//MARK: - Long Press Gesture Action
var videoData : NSData?
var url : NSURL?
var dataPath : String?
func action(gestureRecognizer:UILongPressGestureRecognizer) {
if (lpgr!.state == UIGestureRecognizerState.Began) {
print("Began")
if let device = captureDevice where device.hasTorch {
_ = try? device.lockForConfiguration() // torchMode can only be changed while holding the configuration lock
device.torchMode = AVCaptureTorchMode.On
device.unlockForConfiguration()
}
let paths = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
let documentsDirectory = paths[0] as NSString
let format = NSDateFormatter()
format.dateFormat = "yyyy-MM-dd-HH-mm-ss"
dataPath = documentsDirectory.stringByAppendingPathComponent("video-\(format.stringFromDate(NSDate())).mp4")
url = NSURL(fileURLWithPath: dataPath!)
videoOutput!.startRecordingToOutputFileURL(url, recordingDelegate: self)
print("\(url)")
NSUserDefaults.standardUserDefaults().setURL(url, forKey: "videoURL")
NSUserDefaults.standardUserDefaults().synchronize()
}
if (lpgr!.state == UIGestureRecognizerState.Ended) {
print("Ended")
videoOutput!.stopRecording()
if let device = captureDevice where device.hasTorch {
_ = try? device.lockForConfiguration() // torchMode can only be changed while holding the configuration lock
device.torchMode = AVCaptureTorchMode.Off
device.unlockForConfiguration()
}
url = NSUserDefaults.standardUserDefaults().URLForKey("videoURL")
let format = NSDateFormatter()
format.dateFormat="yyyy-MM-dd-HH-mm-ss"
let outputURL = url!.URLByDeletingLastPathComponent!.URLByAppendingPathComponent("video\(format.stringFromDate(NSDate())).mp4")
self.compressVideo(url!, outputURL: outputURL, handler: { (session) in
if session.status == AVAssetExportSessionStatus.Completed
{
self.url = outputURL
print(self.url)
let data = NSData(contentsOfURL: self.url!)
NSUserDefaults.standardUserDefaults().setURL(self.url, forKey: "videoURL")
NSUserDefaults.standardUserDefaults().synchronize()
print("File size after compression: (Double(data!.length / 1048576)) mb")
self.videoData = NSData(contentsOfURL: self.url!)
print(self.videoData)
self.performSegueWithIdentifier("segueTest", sender: nil)
}
else if session.status == AVAssetExportSessionStatus.Failed
{
print("failed")
}
})
}
}
//MARK: - Compress Video
func compressVideo(inputURL: NSURL, outputURL: NSURL, handler:(session: AVAssetExportSession)-> Void)
{
let urlAsset = AVURLAsset(URL: inputURL, options: nil)
let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality)
exportSession!.outputURL = outputURL
exportSession!.outputFileType = AVFileTypeQuickTimeMovie
exportSession!.shouldOptimizeForNetworkUse = true
exportSession!.exportAsynchronouslyWithCompletionHandler { () -> Void in
handler(session: exportSession!)
}
}
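One other thing I am unsure about: from what I can tell in the documentation, the export fails if a file already exists at the outputURL. My file names include a timestamp so I would not expect a collision, but this is the pre-export check I was considering (sketch only; removeExistingFileIfNeeded is a placeholder name and outputURL stands for the URL I pass into compressVideo):

import Foundation

// Sketch only: clear out any stale file before exporting, since AVAssetExportSession
// will not overwrite an existing file at its outputURL.
func removeExistingFileIfNeeded(outputURL: NSURL) {
    let fileManager = NSFileManager.defaultManager()
    if let path = outputURL.path where fileManager.fileExistsAtPath(path) {
        do {
            try fileManager.removeItemAtURL(outputURL)
        } catch let error as NSError {
            print("could not remove old file: \(error)")
        }
    }
}

Is something like that worth adding, or is the problem more likely elsewhere in my setup above?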