直接上代碼
一個簡單的視頻剪輯app
https://apps.apple.com/cn/app/id6689516466
1、初始化metalView
// 1. Create the MTKView covering the controller's view.
metalView = MTKView.init(frame: self.view.bounds)
// Obtain the default GPU device.
metalView.device = MTLCreateSystemDefaultDevice()
// MetalRender owns all Metal rendering logic for this view.
// FIX(review): in the original paste this statement was fused onto the
// comment line and therefore commented out.
metalRender = MetalRender.init(mtkView: metalView)
// The renderer implements MTKViewDelegate and drives draw/resize callbacks.
// (metalView is used non-optionally above, so no optional chaining here.)
metalView.delegate = metalRender
self.view.addSubview(metalView)
2、metalRender的實現(xiàn)邏輯
// Creates the renderer bound to the given MTKView and prepares all GPU
// state (pipeline, quad vertices, LUT texture, pixel-buffer texture cache).
init(mtkView: MTKView) {
    super.init()
    device = mtkView.device
    // Cache the drawable size; it is handed to the vertex shader for the
    // pixel-space -> clip-space conversion.
    // NOTE(review): drawableSize can still be zero this early in the view's
    // lifetime — confirm against the caller; drawableSizeWillChange updates it.
    self.videoPortSize = vector_float2.init(x: Float(mtkView.drawableSize.width),
                                            y: Float(mtkView.drawableSize.height))
    // Build the render pipeline (vertex + fragment functions).
    self.setupPipeline(mtkView: mtkView)
    // Upload the full-screen quad vertex data.
    self.setupVexterData()
    // Load the LUT texture used by the filter effect.
    self.setupLutTexture()
    // Texture cache used to wrap decoded video CVPixelBuffers as Metal
    // textures. Guarded binding instead of force-unwrapping `device`.
    if let device = device {
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &videoTextureCache)
    }
}
3、初始化渲染管道
// Builds the render pipeline state and the command queue.
func setupPipeline(mtkView: MTKView) {
    // Default library compiled from the target's .metal sources; a named
    // library could be loaded instead.
    let library = device?.makeDefaultLibrary()
    // Entry points — names must match the shader functions.
    let vertexFunc = library?.makeFunction(name: "videoVertexShader")
    let fragmentFunc = library?.makeFunction(name: "videoFragmentShader")
    // Describe the pipeline: attach the two functions and the color format.
    let pipelineDescription = MTLRenderPipelineDescriptor()
    pipelineDescription.vertexFunction = vertexFunc
    pipelineDescription.fragmentFunction = fragmentFunc
    // Must match the view's drawable pixel format.
    pipelineDescription.colorAttachments[0].pixelFormat = mtkView.colorPixelFormat
    // Compile the pipeline state. Log the underlying error instead of
    // silently discarding it with `try?` (the original only knew "nil").
    do {
        pipelineStatus = try device?.makeRenderPipelineState(descriptor: pipelineDescription)
    } catch {
        debugPrint("pipeline init fail")
        debugPrint(error)
    }
    // Queue on which the per-frame command buffers are created.
    self.commendQueue = self.device?.makeCommandQueue()
}
4、設(shè)置頂點數(shù)據(jù)
// Uploads the full-screen quad (two triangles) and the default
// YUV -> RGB conversion matrix into GPU buffers.
func setupVexterData() {
    // The vertex shader treats the view center as the origin, so the quad
    // corners at +/- half the view size cover the whole view.
    let halfWidth = self.videoPortSize.x / 2
    let halfHeight = self.videoPortSize.y / 2
    // Two triangles forming one view-filling quad.
    let vexterData: [VexterData] = [
        VexterData(position: [halfWidth, -halfHeight], textcoord: [1.0, 0.0]),
        VexterData(position: [-halfWidth, -halfHeight], textcoord: [0.0, 0.0]),
        VexterData(position: [-halfWidth, halfHeight], textcoord: [0.0, 1.0]),
        VexterData(position: [halfWidth, -halfHeight], textcoord: [1.0, 0.0]),
        VexterData(position: [-halfWidth, halfHeight], textcoord: [0.0, 1.0]),
        VexterData(position: [halfWidth, halfHeight], textcoord: [1.0, 1.0]),
    ]
    // FIX(review): use `stride` (element spacing in an array, including
    // padding) rather than `size`, and derive the count from the array
    // instead of hard-coding 6.
    vexterBuffer = self.device?.makeBuffer(bytes: vexterData,
                                           length: MemoryLayout<VexterData>.stride * vexterData.count,
                                           options: .storageModeShared)
    // Default YUV -> RGB conversion matrix; replaced per-frame in
    // setTexture(pix:) once the buffer's color attachment is known.
    self.converMatrix = self.device?.makeBuffer(bytes: &colorConversionMatrix,
                                                length: MemoryLayout<matrix_float3x3>.size,
                                                options: .storageModeShared)
}
5、設(shè)置lut紋理
// Loads the bundled "lookup" LUT image into a read-only RGBA8 Metal
// texture (bound at fragment texture index 2 for the filter effect).
func setupLutTexture() {
    guard let image = UIImage.init(named: "lookup")?.cgImage else { return }
    // Describe a plain 2D shader-readable texture matching the image size.
    let textureDes = MTLTextureDescriptor()
    textureDes.width = image.width
    textureDes.height = image.height
    textureDes.pixelFormat = .rgba8Unorm
    textureDes.usage = .shaderRead
    self.textureLut = device?.makeTexture(descriptor: textureDes)
    // Decode the image into a raw RGBA byte buffer.
    let data = loadImageData(image: image)
    // FIX(review): loadImageData allocates the buffer and hands ownership
    // to the caller; free it after the bytes are copied into the texture
    // (the original leaked it on every call).
    defer { data.deallocate() }
    // Upload the whole image to mip level 0 (4 bytes per pixel).
    let region = MTLRegionMake2D(0, 0, image.width, image.height)
    self.textureLut?.replace(region: region, mipmapLevel: 0,
                             withBytes: data, bytesPerRow: image.width * 4)
}
// Decodes `image` into a freshly allocated RGBA8 buffer of
// width * height * 4 bytes, flipped vertically (CoreGraphics' origin is
// bottom-left). The CALLER owns the returned pointer and must
// deallocate() it.
func loadImageData(image: CGImage) -> UnsafeMutablePointer<GLubyte> {
    let width = image.width
    let height = image.height
    // GLubyte is a UInt8 typealias (1 byte), so capacity is simply the
    // pixel count times 4 channels.
    let data = UnsafeMutablePointer<GLubyte>.allocate(capacity: width * height * 4)
    // FIX(review): the original pushed a UIGraphicsBeginImageContext that
    // was never used — the explicit CGContext below does the rendering.
    // NOTE(review): reusing the source image's bitmapInfo assumes the asset
    // is already 8-bit RGBA-compatible — confirm for all bundled LUTs.
    let context = CGContext(data: data, width: width, height: height,
                            bitsPerComponent: 8, bytesPerRow: width * 4,
                            space: image.colorSpace!,
                            bitmapInfo: image.bitmapInfo.rawValue)
    // Flip vertically so row 0 of the buffer is the top of the image.
    context?.translateBy(x: 0, y: CGFloat(height))
    context?.scaleBy(x: 1, y: -1)
    context?.draw(image, in: CGRect(x: 0, y: 0, width: width, height: height))
    return data
}
6、上傳視頻幀到GPU
// Wraps a decoded video frame's Y and CbCr planes as Metal textures via
// the texture cache, and selects the matching YUV -> RGB conversion
// matrix (BT.601 vs BT.709) from the buffer's color attachment.
func setTexture(pix: CVPixelBuffer) {
    // Bi-planar YUV buffers have 2 planes: Y + interleaved CbCr.
    let planeCount = CVPixelBufferGetPlaneCount(pix)
    // Read the YCbCr matrix attachment to decide the color space.
    let colorAttachments = CVBufferGetAttachment(pix, kCVImageBufferYCbCrMatrixKey, nil)?.takeUnretainedValue() as? String
    if let colorAttachments = colorAttachments {
        // FIX(review): safe binding replaces the original `as! CFString`.
        if CFStringCompare(colorAttachments as CFString, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .compareCaseInsensitive) == .compareEqualTo {
            // BT.601 (SD content).
            colorConversionMatrix = kMetalColorConversion601
        } else {
            // Everything else is treated as BT.709 (HD content).
            colorConversionMatrix = kMetalColorConversion709
        }
        self.converMatrix = self.device?.makeBuffer(bytes: &colorConversionMatrix,
                                                    length: MemoryLayout<matrix_float3x3>.size,
                                                    options: .storageModeShared)
    } else {
        // Debug-only trap: decoder output is expected to carry the
        // attachment; release builds keep the previous matrix.
        assert(false, "get color space fail ...")
    }
    // Plane 0 = full-resolution luma (Y), single channel -> .r8Unorm.
    let plane0Width = CVPixelBufferGetWidthOfPlane(pix, 0)
    let plane0Height = CVPixelBufferGetHeightOfPlane(pix, 0)
    if let videoTextureCache = self.videoTextureCache {
        var texture: CVMetalTexture?
        let status = CVMetalTextureCacheCreateTextureFromImage(nil, videoTextureCache, pix, nil, .r8Unorm, plane0Width, plane0Height, 0, &texture)
        if status == kCVReturnSuccess, let texture = texture {
            self.textureY = CVMetalTextureGetTexture(texture)
        }
    }
    // Plane 1 = half-resolution interleaved chroma (CbCr) -> .rg8Unorm.
    if planeCount == 2 {
        let plane1Width = CVPixelBufferGetWidthOfPlane(pix, 1)
        let plane1Height = CVPixelBufferGetHeightOfPlane(pix, 1)
        if let videoTextureCache = self.videoTextureCache {
            var texture: CVMetalTexture?
            let status = CVMetalTextureCacheCreateTextureFromImage(nil, videoTextureCache, pix, nil, .rg8Unorm, plane1Width, plane1Height, 1, &texture)
            if status == kCVReturnSuccess, let texture = texture {
                self.textureUV = CVMetalTextureGetTexture(texture)
            }
        }
    }
}
7、metalView的代理
// MTKView delegate: called when the drawable size changes (rotation,
// window resize). Keeps the cached viewport size in sync; the vertex
// shader uses it for pixel-space -> clip-space conversion.
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
    let newPortSize = vector_float2(Float(size.width), Float(size.height))
    self.videoPortSize = newPortSize
}
//繪制圖像
// MTKView delegate: encodes and submits one frame.
func draw(in view: MTKView) {
    // One command buffer per frame.
    let commendBuffer = self.commendQueue?.makeCommandBuffer()
    commendBuffer?.label = "metal render commendBuffer"
    // FIX(review): currentRenderPassDescriptor / currentDrawable can
    // legitimately be nil (e.g. view not visible) — skip the frame
    // instead of force-unwrapping the drawable.
    if let renderDescription = view.currentRenderPassDescriptor,
       let drawable = view.currentDrawable {
        // Clear to opaque black before drawing.
        renderDescription.colorAttachments[0].clearColor = MTLClearColorMake(0, 0, 0, 1.0)
        let encoder = commendBuffer?.makeRenderCommandEncoder(descriptor: renderDescription)
        encoder?.label = "metal encoder"
        // Viewport spans the whole drawable.
        encoder?.setViewport(MTLViewport(originX: 0, originY: 0,
                                         width: Double(self.videoPortSize.x),
                                         height: Double(self.videoPortSize.y),
                                         znear: -1, zfar: 1))
        // Attach the compiled pipeline, if it built successfully.
        if let pipelineStatus = pipelineStatus {
            encoder?.setRenderPipelineState(pipelineStatus)
        }
        // Vertex buffer 0: the full-screen quad.
        encoder?.setVertexBuffer(self.vexterBuffer, offset: 0, index: 0)
        // Vertex buffer 1: drawable size for clip-space conversion.
        encoder?.setVertexBytes(&self.videoPortSize, length: MemoryLayout<vector_float2>.size, index: 1)
        // Fragment buffer 1: effect type. FIX(review): the shader declares
        // a 32-bit `int`; Swift Int is 8 bytes and only matched by
        // little-endian accident — pass an explicit Int32.
        var shaderType = Int32(type)
        encoder?.setFragmentBytes(&shaderType, length: MemoryLayout<Int32>.size, index: 1)
        // Fragment buffer 0: YUV -> RGB conversion matrix.
        encoder?.setFragmentBuffer(self.converMatrix, offset: 0, index: 0)
        // Textures: Y plane, interleaved CbCr plane, filter LUT.
        encoder?.setFragmentTexture(textureY, index: 0)
        encoder?.setFragmentTexture(textureUV, index: 1)
        encoder?.setFragmentTexture(textureLut, index: 2)
        // Quad = two triangles = six vertices.
        encoder?.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
        encoder?.endEncoding()
        // Schedule presentation once rendering completes.
        commendBuffer?.present(drawable)
    }
    // Submit (a frame with no render pass commits an empty buffer).
    commendBuffer?.commit()
}
8、metal shader
// Vertex shader output / rasterizer input: clip-space position plus the
// texture coordinate interpolated across the quad.
typedef struct {
float4 position [[position]];
float2 textcoord;
} VexterData;
// Vertex shader input: per-vertex pixel-space position (origin at the
// view center) and texture coordinate, as uploaded from the Swift side.
typedef struct {
float2 position;
float2 textcoord;
} VexterDataIn;
// Vertex shader: converts pixel-space quad vertices (origin at the view
// center) into normalized clip space and forwards the texture coordinate.
// FIX(review): constant address-space arguments must be passed by pointer
// or reference in MSL; the original declared the buffer(1) argument by
// value, which does not compile.
vertex VexterData videoVertexShader(uint vertexID [[vertex_id]],
                                    constant VexterDataIn *vertexData [[buffer(0)]],
                                    constant vector_float2 &viewPortSizePointer [[buffer(1)]]) {
    VexterData out;
    // Plain 2D rendering: z = 0, w = 1.
    out.position = vector_float4(0, 0, 0, 1.0);
    // Pixel-space position of this vertex.
    float2 pixelSpacePosition = vertexData[vertexID].position.xy;
    // Drawable size uploaded via setVertexBytes (buffer index 1).
    vector_float2 viewPortSize = viewPortSizePointer;
    // Normalize: half the view size maps to the [-1, 1] clip-space range.
    out.position.xy = pixelSpacePosition / (viewPortSize / 2.0);
    // Pass the texture coordinate through for interpolation.
    out.textcoord = vertexData[vertexID].textcoord;
    return out;
}
9、片元函數(shù)
// Fragment shader: samples the Y / CbCr planes, converts to RGB with the
// supplied matrix and applies the selected effect.
//   type 0 = normal, 1 = grayscale, 2 = 3x3 tiled grid, 3 = LUT filter.
// NOTE(review): calls normalColor/clearColor/transformYUVToRGB/
// calNighTextCoord/filterPlay — those must be declared before this
// function in the .metal file.
// FIX(review): the `type` argument was declared `constant int` by value;
// constant address-space arguments must be a pointer or reference in MSL.
fragment float4 videoFragmentShader(VexterData input [[stage_in]],
                                    texture2d<float> textureY [[texture(0)]],
                                    texture2d<float> textureUV [[texture(1)]],
                                    texture2d<float> lut [[texture(2)]],
                                    constant float3x3 *colorMatrix [[buffer(0)]],
                                    constant int &type [[buffer(1)]]) {
    constexpr sampler textureSampler(mag_filter::linear, min_filter::linear);
    // The decoded frame is vertically flipped relative to the quad's
    // texture coordinates, so flip v when sampling.
    float2 textcoord = float2(input.textcoord.x, 1.0 - input.textcoord.y);
    // Y in .r; CbCr in .rg, re-centered from [0,1] to [-0.5,0.5].
    float3 yuv = float3(textureY.sample(textureSampler, textcoord).r,
                        textureUV.sample(textureSampler, textcoord).rg - float2(0.5, 0.5));
    float4 rgba = float4(0.0, 0.0, 0.0, 1.0);
    switch (type) {
        case 0: {
            // Normal playback.
            rgba = normalColor(yuv, *colorMatrix);
            break;
        }
        case 1: {
            // Grayscale playback.
            rgba = clearColor(yuv, *colorMatrix);
            break;
        }
        case 2: {
            // 3x3 grid playback: remap the coordinate into its grid cell,
            // then resample (renamed from the original shadowing `yuv`).
            float2 textCoord = calNighTextCoord(input.textcoord);
            textCoord = float2(textCoord.x, 1 - textCoord.y);
            float3 gridYuv = float3(textureY.sample(textureSampler, textCoord).r,
                                    textureUV.sample(textureSampler, textCoord).rg - float2(0.5, 0.5));
            float3 nRgb = transformYUVToRGB(gridYuv, *colorMatrix);
            rgba = float4(nRgb, 1.0);
            break;
        }
        case 3: {
            // LUT filter playback.
            float3 nrgb = transformYUVToRGB(yuv, *colorMatrix);
            rgba = filterPlay(nrgb, lut);
            break;
        }
        default:
            break;
    }
    return rgba;
}
//黑白播放
// Grayscale playback: keep only the luma component before applying the
// YUV -> RGB matrix, yielding a gray pixel with opaque alpha.
float4 clearColor(float3 yuv, float3x3 matrix) {
    float3 lumaOnly = float3(yuv.x, 0.0, 0.0);
    return float4(matrix * lumaOnly, 1.0);
}
//正常播放
// Normal playback: full YUV -> RGB conversion with opaque alpha.
float4 normalColor(float3 yuv, float3x3 matrix) {
    float3 rgb = matrix * yuv;
    return float4(rgb, 1.0);
}
// yuv轉(zhuǎn)換RGB
// Converts a (re-centered) YUV triple to RGB via the 601/709 matrix.
float3 transformYUVToRGB(float3 yuv, float3x3 matrix) {
    float3 rgb = matrix * yuv;
    return rgb;
}
//計算九宮格采樣點
// Maps a full-quad texture coordinate into its 3x3 grid cell so the same
// frame is repeated nine times.
// FIX(review): the original mixed `<` (x axis) and `<=` (first y branch);
// both axes now use `<` consistently — only the exact 1/3 boundary value
// is affected.
float2 calNighTextCoord(float2 textCoord) {
    float2 tcd = textCoord;
    // Horizontal cell: rescale the matching third back to [0, 1).
    if (tcd.x < 1.0 / 3.0) {
        tcd.x = tcd.x * 3.0;
    } else if (tcd.x < 2.0 / 3.0) {
        tcd.x = (tcd.x - 1.0 / 3.0) * 3.0;
    } else {
        tcd.x = (tcd.x - 2.0 / 3.0) * 3.0;
    }
    // Vertical cell: same remapping on the y axis.
    if (tcd.y < 1.0 / 3.0) {
        tcd.y = tcd.y * 3.0;
    } else if (tcd.y < 2.0 / 3.0) {
        tcd.y = (tcd.y - 1.0 / 3.0) * 3.0;
    } else {
        tcd.y = (tcd.y - 2.0 / 3.0) * 3.0;
    }
    return tcd;
}
//濾鏡播放
// Applies a 512x512 color lookup table (64 cells arranged 8x8, one per
// quantized blue level) to `rgb` — the GPUImage-style LUT filter.
// FIX(review): newColor2 sampled texPos1 instead of texPos2, so the
// blend between adjacent blue cells never happened.
float4 filterPlay(float3 rgb, texture2d<float> lut) {
    // Blue selects the pair of adjacent 64x64 cells to blend between.
    float blueColor = rgb.b * 63.0;
    float2 quad1;
    quad1.y = floor(floor(blueColor) / 8.0);
    quad1.x = floor(blueColor) - (quad1.y * 8.0);
    float2 quad2;
    quad2.y = floor(ceil(blueColor) / 8.0);
    quad2.x = ceil(blueColor) - (quad2.y * 8.0);
    // Red/green pick the texel within each cell; the half-texel offset
    // keeps the sample inside the cell under linear filtering.
    float2 texPos1;
    texPos1.x = (quad1.x * 0.125) + 0.5 / 512.0 + ((0.125 - 1.0 / 512.0) * rgb.r);
    texPos1.y = (quad1.y * 0.125) + 0.5 / 512.0 + ((0.125 - 1.0 / 512.0) * rgb.g);
    float2 texPos2;
    texPos2.x = (quad2.x * 0.125) + 0.5 / 512.0 + ((0.125 - 1.0 / 512.0) * rgb.r);
    texPos2.y = (quad2.y * 0.125) + 0.5 / 512.0 + ((0.125 - 1.0 / 512.0) * rgb.g);
    constexpr sampler textureSampler(mag_filter::linear, min_filter::linear);
    float4 newColor1 = lut.sample(textureSampler, texPos1);
    float4 newColor2 = lut.sample(textureSampler, texPos2);
    // Interpolate between the two cells by the fractional blue level.
    float4 newColor = mix(newColor1, newColor2, fract(blueColor));
    // The original ended with mix(a, b, 1.0) — a no-op returning b — and
    // alpha 0.0; return the LUT color directly with opaque alpha for
    // consistency with the other effect paths.
    return float4(newColor.rgb, 1.0);
}