
iOS video compression in Swift (iOS 8) produces a corrupt video file

I am trying to compress video captured with the user's camera through UIImagePickerController (not an existing video, but one recorded on the fly) so it can be uploaded to my server without taking too long. A smaller size is ideal, instead of the 30-45 MB files that newer, higher-quality cameras produce.

Here is the code that does the compression in Swift for iOS 8, and it compresses wonderfully: I go from 35 MB down to 2.1 MB easily.

   func convertVideo(inputUrl: NSURL, outputURL: NSURL) 
   {
    //setup video writer
    var videoAsset = AVURLAsset(URL: inputUrl, options: nil) as AVAsset

    var videoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack

    var videoSize = videoTrack.naturalSize

    var videoWriterCompressionSettings = Dictionary(dictionaryLiteral:(AVVideoAverageBitRateKey,NSNumber(integer:960000)))

    var videoWriterSettings = Dictionary(dictionaryLiteral:(AVVideoCodecKey,AVVideoCodecH264),
        (AVVideoCompressionPropertiesKey,videoWriterCompressionSettings),
        (AVVideoWidthKey,videoSize.width),
        (AVVideoHeightKey,videoSize.height))

    var videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoWriterSettings)

    videoWriterInput.expectsMediaDataInRealTime = true

    videoWriterInput.transform = videoTrack.preferredTransform


    var videoWriter = AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie, error: nil)

    videoWriter.addInput(videoWriterInput)

    var videoReaderSettings: [String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]

    var videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)

    var videoReader = AVAssetReader(asset: videoAsset, error: nil)

    videoReader.addOutput(videoReaderOutput)



    //setup audio writer
    var audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: nil)

    audioWriterInput.expectsMediaDataInRealTime = false

    videoWriter.addInput(audioWriterInput)


    //setup audio reader

    var audioTrack = videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as AVAssetTrack

    var audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil) as AVAssetReaderOutput

    var audioReader = AVAssetReader(asset: videoAsset, error: nil)


    audioReader.addOutput(audioReaderOutput)

    videoWriter.startWriting()


    //start writing from video reader
    videoReader.startReading()

    videoWriter.startSessionAtSourceTime(kCMTimeZero)

    //dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue", nil)

    var queue = dispatch_queue_create("processingQueue", nil)

    videoWriterInput.requestMediaDataWhenReadyOnQueue(queue, usingBlock: { () -> Void in
        println("Export starting")

        while videoWriterInput.readyForMoreMediaData
        {
            var sampleBuffer:CMSampleBufferRef!

            sampleBuffer = videoReaderOutput.copyNextSampleBuffer()

            if (videoReader.status == AVAssetReaderStatus.Reading && sampleBuffer != nil)
            {
                videoWriterInput.appendSampleBuffer(sampleBuffer)

            }

            else
            {
                videoWriterInput.markAsFinished()

                if videoReader.status == AVAssetReaderStatus.Completed
                {
                    if audioReader.status == AVAssetReaderStatus.Reading || audioReader.status == AVAssetReaderStatus.Completed
                    {

                    }
                    else {


                        audioReader.startReading()

                        videoWriter.startSessionAtSourceTime(kCMTimeZero)

                        var queue2 = dispatch_queue_create("processingQueue2", nil)


                        audioWriterInput.requestMediaDataWhenReadyOnQueue(queue2, usingBlock: { () -> Void in

                            while audioWriterInput.readyForMoreMediaData
                            {
                                var sampleBuffer:CMSampleBufferRef!

                                sampleBuffer = audioReaderOutput.copyNextSampleBuffer()

                                println(sampleBuffer == nil)

                                if (audioReader.status == AVAssetReaderStatus.Reading && sampleBuffer != nil)
                                {
                                    audioWriterInput.appendSampleBuffer(sampleBuffer)

                                }

                                else
                                {
                                    audioWriterInput.markAsFinished()

                                    if (audioReader.status == AVAssetReaderStatus.Completed)
                                    {

                                        videoWriter.finishWritingWithCompletionHandler({ () -> Void in

                                            println("Finished writing video asset.")

                                            self.videoUrl = outputURL

                                                var data = NSData(contentsOfURL: outputURL)!

                                                 println("Byte Size After Compression: (data.length / 1048576) mb")

                                                println(videoAsset.playable)

                                                //Networking().uploadVideo(data, fileName: "Test2")

                                            self.dismissViewControllerAnimated(true, completion: nil)

                                        })
                                        break
                                    }
                                }
                            }
                        })
                        break
                    }
                }
            }// Second if

        }//first while

    })// first block
   // return
}

Here is the code for my UIImagePickerController delegate method that calls the compression function:

func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject])
{
    // Extract the media type from selection

    let type = info[UIImagePickerControllerMediaType] as String

    if (type == kUTTypeMovie)
    {

        self.videoUrl = info[UIImagePickerControllerMediaURL] as? NSURL

        var uploadUrl = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingPathComponent("captured").stringByAppendingString(".mov"))

        var data = NSData(contentsOfURL: self.videoUrl!)!

        println("Size Before Compression: (data.length / 1048576) mb")


        self.convertVideo(self.videoUrl!, outputURL: uploadUrl!)

        // Get the video from the info and set it appropriately.

        /*self.dismissViewControllerAnimated(true, completion: { () -> Void in


        //self.next.enabled = true

        })*/
    }
}

As I mentioned above, this works as far as file size reduction goes, but when I get the file back (it is still of type .mov), QuickTime cannot play it. QuickTime does try to convert it initially but fails partway through (1-2 seconds after opening the file). I've even tested the video file in AVPlayerViewController, but it doesn't give any info about the movie; it's just a play button with no loading and no duration, only "--" where the time usually appears in the player. In short, a corrupt file that won't play.

I'm sure it has something to do with the settings used when writing the asset out, whether for the video or the audio, but I'm not certain which. It could even be the reading of the asset that is causing the corruption. I've tried changing the variables around and setting different keys for reading and writing, but I haven't found the right combination, and it's frustrating that I can compress the file but end up with a corrupt result. Any help would be appreciated.
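
For what it's worth, here is a minimal diagnostic sketch in the same iOS 8-era Swift (assuming the videoWriter, videoReader and audioReader instances from convertVideo above). Logging each object's status and error inside the finishWritingWithCompletionHandler block should at least show which stage is failing:

    // Diagnostic checks that can go inside the existing finishWritingWithCompletionHandler block
    if videoWriter.status == AVAssetWriterStatus.Failed
    {
        // A failed writer carries the NSError explaining why the output file is unreadable
        println("Writer error: \(videoWriter.error)")
    }
    if videoReader.status == AVAssetReaderStatus.Failed
    {
        println("Video reader error: \(videoReader.error)")
    }
    if audioReader.status == AVAssetReaderStatus.Failed
    {
        println("Audio reader error: \(audioReader.error)")
    }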

See Question&Answers more detail:os

与恶龙缠斗过久,自身亦成为恶龙;凝视深渊过久,深渊将回以凝视…
Welcome To Ask or Share your Answers For Others

1 Answer


This answer has been completely rewritten and annotated to support Swift 4.0. Keep in mind that changing the AVFileType and presetName values allows you to tweak the final output in terms of size and quality.

import AVFoundation

extension ViewController: AVCaptureFileOutputRecordingDelegate {
    // Delegate function has been updated
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        // This code just exists for getting the before size. You can remove it from production code
        do {
            let data = try Data(contentsOf: outputFileURL)
            print("File size before compression: (Double(data.count / 1048576)) mb")
        } catch {
            print("Error: (error)")
        }
        // This line creates a generic filename based on UUID, but you may want to use your own
        // The extension must match with the AVFileType enum
        let path = NSTemporaryDirectory() + UUID().uuidString + ".m4v"
        let outputURL = URL.init(fileURLWithPath: path)
        let urlAsset = AVURLAsset(url: outputFileURL) // Build the asset from the file that was just recorded
        // You can change the presetName value to obtain different results
        if let exportSession = AVAssetExportSession(asset: urlAsset,
                                                    presetName: AVAssetExportPresetMediumQuality) {
            exportSession.outputURL = outputURL
            // Changing the AVFileType enum gives you different options with
            // varying size and quality. Just ensure that the file extension
            // aligns with your choice
            exportSession.outputFileType = AVFileType.m4v
            exportSession.exportAsynchronously {
                switch exportSession.status {
                case .unknown: break
                case .waiting: break
                case .exporting: break
                case .completed:
                    // This code only exists to provide the file size after compression. Should remove this from production code
                    do {
                        let data = try Data(contentsOf: outputURL)
                        print("File size after compression: \(Double(data.count / 1048576)) mb")
                    } catch {
                        print("Error: \(error)")
                    }
                case .failed: break
                case .cancelled: break
                }
            }
        }
    }
}
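
Since presetName and AVFileType are the main tuning knobs here, the small sketch below (a hypothetical helper, assuming urlAsset is the AVURLAsset you intend to export, as in the code above) lists the presets AVFoundation considers valid for a given asset and verifies one preset/file-type pair before the session is created:

import AVFoundation

// Hypothetical helper for illustration: logs which export presets are usable for an asset
func logExportOptions(for urlAsset: AVURLAsset) {
    // Presets that AVFoundation reports as compatible with this particular asset
    let presets = AVAssetExportSession.exportPresets(compatibleWith: urlAsset)
    print("Compatible presets: \(presets)")

    // Asynchronously verify a specific preset/file-type combination before exporting
    AVAssetExportSession.determineCompatibility(ofExportPreset: AVAssetExportPresetMediumQuality,
                                                with: urlAsset,
                                                outputFileType: AVFileType.mov) { isCompatible in
        print("MediumQuality -> .mov compatible: \(isCompatible)")
    }
}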

Below is the original answer as written for Swift 3.0:

extension ViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        guard let data = NSData(contentsOf: outputFileURL as URL) else {
            return
        }

        print("File size before compression: (Double(data.length / 1048576)) mb")
        let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
        compressVideo(inputURL: outputFileURL as URL, outputURL: compressedURL) { (exportSession) in
            guard let session = exportSession else {
                return
            }

            switch session.status {
            case .unknown:
                break
            case .waiting:
                break
            case .exporting:
                break
            case .completed:
                guard let compressedData = NSData(contentsOf: compressedURL) else {
                    return
                }

                print("File size after compression: (Double(compressedData.length / 1048576)) mb")
            case .failed:
                break
            case .cancelled:
                break
            }
        }
    }

    func compressVideo(inputURL: URL, outputURL: URL, handler:@escaping (_ exportSession: AVAssetExportSession?)-> Void) {
        let urlAsset = AVURLAsset(url: inputURL, options: nil)
        guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
            handler(nil)

            return
        }

        exportSession.outputURL = outputURL
        exportSession.outputFileType = AVFileTypeQuickTimeMovie
        exportSession.shouldOptimizeForNetworkUse = true
        exportSession.exportAsynchronously { () -> Void in
            handler(exportSession)
        }
    }
}
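
Since the question captures video through UIImagePickerController rather than AVCaptureFileOutput, here is a minimal sketch (Swift 3 style, assuming the compressVideo(inputURL:outputURL:handler:) helper above and a view controller that presents the picker for camera capture) of how the picker's delegate callback could feed into the same helper:

import UIKit
import MobileCoreServices

extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
        // Only handle movie captures; anything else is simply dismissed
        guard let mediaType = info[UIImagePickerControllerMediaType] as? String,
            mediaType == (kUTTypeMovie as String),
            let videoURL = info[UIImagePickerControllerMediaURL] as? URL else {
                picker.dismiss(animated: true, completion: nil)
                return
        }

        // Compress into a temporary file, then hand the result off once the export completes
        let compressedURL = URL(fileURLWithPath: NSTemporaryDirectory() + UUID().uuidString + ".m4v")
        compressVideo(inputURL: videoURL, outputURL: compressedURL) { exportSession in
            guard let session = exportSession, session.status == .completed else {
                return
            }
            // Upload or persist the file at compressedURL here
        }

        picker.dismiss(animated: true, completion: nil)
    }
}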
