iOS 从视频文件中抽取所有帧

原文地址: https://blog.lm1024.club/archives/202012021932
一个从视频中抽取所有帧的工具类，方便以后复用。

import Foundation
import AVFoundation
public class WJVideoReader: NSObject {

    /// Reads decoded samples from the asset; `nil` when the file could not be opened.
    private let reader: AVAssetReader?
    private var videoTrackOutput: AVAssetReaderTrackOutput?
    private var isReading: Bool = false

    /// Receives every decoded sample buffer on the push thread.
    /// NOTE(review): held strongly — if the delegate also retains this reader a
    /// cycle results; consider a class-bound protocol plus `weak` here.
    public var delegate: WJVideoSourceOutputDelegate?

    /// Dedicated thread whose run loop delivers samples to the delegate.
    public var pushThread: Thread?
    let pushVideoQueue = DispatchQueue(label: "com.push.video.queue", qos: DispatchQoS.default)

    /// Creates a reader for the video file at `path`.
    ///
    /// Setup failures no longer crash: the original force-unwrapped `reader`
    /// (built with `try?`) and the first video track, so an unreadable or
    /// audio-only file aborted the process. Now the instance is simply inert
    /// and `startReader()` becomes a no-op. `init` is also made `public` so
    /// the public class can actually be constructed from other modules.
    /// - Parameter path: Absolute file-system path to the video file.
    public init(path: String) {
        let asset = AVURLAsset(url: URL(fileURLWithPath: path))
        self.reader = try? AVAssetReader(asset: asset)

        // Decode into 32-bit ARGB pixel buffers.
        let outputSettings: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32ARGB)
        ]
        if let reader = self.reader,
           let videoTrack = asset.tracks(withMediaType: .video).first {
            let output = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSettings)
            // Avoid a per-frame copy; each buffer must be consumed before the next read.
            output.alwaysCopiesSampleData = false
            if reader.canAdd(output) {
                reader.add(output)
            }
            self.videoTrackOutput = output
        }
        super.init()

        // Spin up the delivery thread; its run loop is kept alive by a mach
        // port (see lanchRunloop), so the thread lives for the reader's lifetime.
        let thread = Thread(target: self, selector: #selector(lanchRunloop), object: nil)
        self.pushThread = thread
        thread.start()
    }

    deinit {
        // Ensure an in-flight read is cancelled when the reader is released.
        stopReader()
    }

    /// Begins decoding unless the reader is already running (or failed to open).
    public func startReader() {
        guard let reader = self.reader, reader.status != .reading else { return }
        goReader()
    }

    /// Cancels an in-flight read; safe to call at any time.
    public func stopReader() {
        guard let reader = self.reader, reader.status == .reading else { return }
        reader.cancelReading()
    }

    /// Starts the asset reader and pumps frames on a background queue, pacing
    /// delivery by the presentation-timestamp delta between successive buffers.
    private func goReader() {
        guard let reader = self.reader else { return }
        if reader.status != .reading {
            reader.startReading()
        }

        var previousTimestamp = CMTime.zero
        DispatchQueue.global().async { [weak self] in
            // `reader` is captured strongly on purpose: the loop condition must
            // keep observing its status even while `self` may be released.
            while reader.status == .reading {
                guard let self = self else { return }
                guard let sampleBuffer = self.videoTrackOutput?.copyNextSampleBuffer() else {
                    // nil buffer at end of stream: status flips to .completed
                    // (or .failed) and the loop exits on the next check.
                    continue
                }

                // Forward the buffer on the push thread so all delegate
                // callbacks are serialized on one thread.
                if let pushThread = self.pushThread {
                    self.perform(#selector(self.pushSample(sample:)),
                                 on: pushThread,
                                 with: sampleBuffer,
                                 waitUntilDone: false)
                }

                // Sleep for the gap between this frame and the previous one so
                // frames are emitted at roughly their presentation rate; the
                // very first frame uses a fixed 40 ms pause.
                let timestamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
                var pause = CMTimeSubtract(timestamp, previousTimestamp)
                if previousTimestamp == .zero {
                    pause = CMTime(value: 4, timescale: 100)
                }
                previousTimestamp = timestamp
                // Clamp to zero: out-of-order timestamps would yield a negative delta.
                Thread.sleep(forTimeInterval: max(0, CMTimeGetSeconds(pause)))
            }
        }
    }

    /// Entry point of the push thread (name kept for source compatibility —
    /// "launch" is misspelled in the original internal API). Installs a mach
    /// port so the run loop has at least one source and `run()` never returns.
    @objc internal func lanchRunloop() {
        autoreleasepool {
            let currentThread = Thread.current
            currentThread.name = "com.wuji.video.push"
            let runLoop = RunLoop.current
            runLoop.add(NSMachPort(), forMode: .common)
            runLoop.run() // blocks for the lifetime of the thread
        }
    }

    /// Hands one decoded sample buffer to the delegate (runs on the push thread).
    @objc private func pushSample(sample: CMSampleBuffer) {
        delegate?.output(sampleBuffer: sample)
    }
}
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
【社區(qū)內(nèi)容提示】社區(qū)部分內(nèi)容疑似由AI輔助生成,瀏覽時請結(jié)合常識與多方信息審慎甄別。
平臺聲明:文章內(nèi)容(如有圖片或視頻亦包括在內(nèi))由作者上傳并發(fā)布,文章內(nèi)容僅代表作者本人觀點,簡書系信息發(fā)布平臺,僅提供信息存儲服務(wù)。

相关阅读更多精彩内容

友情链接更多精彩内容