APLCrossDissolveRenderer.swift
/*
Copyright (C) 2017 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample’s licensing information
Abstract:
APLCrossDissolveRenderer, a subclass of APLMetalRenderer, renders the given source buffers to perform a cross
dissolve over the time range of the transition.
*/
import Foundation
import CoreVideo
import MetalKit
class APLCrossDissolveRenderer: APLMetalRenderer {
/// Vertex coordinates used for drawing our geometric primitives (triangles).
fileprivate let vertexArray: [Float] = [
-1.0, 1.0, 0, 1,
-1.0, -1.0, 0, 1,
1.0, -1.0, 0, 1,
-1.0, 1.0, 0, 1,
1.0, -1.0, 0, 1,
1.0, 1.0, 0, 1
]
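// The six vertices above form two triangles that together cover the full clip-space quad
// (x and y each spanning [-1, 1]), so every draw fills the entire destination texture.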
/// Texture coordinates, one per vertex, used when sampling the source textures.
fileprivate let textureCoordsArray: [Float] = [
0.0, 0.0,
0.0, 1.0,
1.0, 1.0,
0.0, 0.0,
1.0, 1.0,
1.0, 0.0
]
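// Each texture coordinate pairs with the vertex at the same index above; (0, 0) is the
// top-left texel in Metal's texture coordinate space, so the top-left of the render target
// samples the top-left of the source texture.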
/// The colors for each vertex coordinate.
fileprivate let colorArray: [Float] = [
1, 0, 0, 1,
0, 1, 0, 1,
0, 0, 1, 1,
1, 0, 0, 1,
0, 0, 1, 1,
1, 0, 1, 1
]
/// MTLRenderPipelineState objects that contain compiled rendering state, including the vertex and fragment shaders.
fileprivate var foregroundRenderPipelineState: MTLRenderPipelineState?
fileprivate var backgroundRenderPipelineState: MTLRenderPipelineState?
/// MTLBuffer used for vertex data.
fileprivate var vertexBuffer: MTLBuffer?
/// MTLBuffer used for texture data.
fileprivate var textureCoordBuffer: MTLBuffer?
/// MTLBuffer used for color data.
fileprivate var colorBuffer: MTLBuffer?
/*
Instance of RenderPixelBuffers to maintain references to pixel buffers until they are no longer
needed.
*/
fileprivate var pixelBuffers: RenderPixelBuffers?
override init?() {
super.init()
// The default library contains all of the shader functions that were compiled into our app bundle.
guard let library = device.newDefaultLibrary() else { return nil }
// Retrieve the functions that will comprise our pipeline.
// Load the vertex program into the library
guard let vertexFunc = library.makeFunction(name: "passthroughVertexShader") else { return nil }
// Load the fragment program into the library
guard let fragmentFunc = library.makeFunction(name: "texturedQuadFragmentShader") else { return nil }
vertexBuffer =
device.makeBuffer(bytes: vertexArray,
length: vertexArray.count * MemoryLayout.size(ofValue: vertexArray[0]),
options: .storageModeShared)
textureCoordBuffer =
device.makeBuffer(bytes: textureCoordsArray,
length: textureCoordsArray.count * MemoryLayout.size(ofValue: textureCoordsArray[0]),
options: .storageModeShared)
colorBuffer =
device.makeBuffer(bytes: colorArray,
length: colorArray.count * MemoryLayout.size(ofValue: colorArray[0]),
options: .storageModeShared)
// Compile the functions and other state into a pipeline object.
do {
foregroundRenderPipelineState =
try buildForegroundRenderPipelineState(vertexFunc, fragmentFunction: fragmentFunc)
backgroundRenderPipelineState =
try buildBackgroundRenderPipelineState(vertexFunc, fragmentFunction: fragmentFunc)
} catch {
print("Unable to compile render pipeline state due to error:\(error)")
return nil
}
}
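/*
Illustrative usage sketch (not part of the original sample): in a custom video compositor the
pixel buffers would come from the composition request, and the tween factor would be the
normalized progress of the transition. The buffer names here are placeholders.

    if let renderer = APLCrossDissolveRenderer() {
        renderer.renderPixelBuffer(destinationBuffer,
                                   usingForegroundSourceBuffer: foregroundBuffer,
                                   andBackgroundSourceBuffer: backgroundBuffer,
                                   forTweenFactor: 0.5)
    }
*/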
func setupRenderPassDescriptorForTexture(_ texture: MTLTexture) -> MTLRenderPassDescriptor {
/*
MTLRenderPassDescriptor contains attachments that are the rendering destination for pixels
generated by a rendering pass.
*/
let renderPassDescriptor = MTLRenderPassDescriptor()
// Associate the texture object with the attachment.
renderPassDescriptor.colorAttachments[0].texture = texture
// Set color to use when the color attachment is cleared.
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 1.0)
renderPassDescriptor.colorAttachments[0].loadAction = .clear
renderPassDescriptor.colorAttachments[0].storeAction = .store
return renderPassDescriptor
}
func buildForegroundRenderPipelineState(_ vertexFunction: MTLFunction, fragmentFunction: MTLFunction) throws -> MTLRenderPipelineState {
// A MTLRenderPipelineDescriptor object that describes the attributes of the render pipeline state.
let pipelineDescriptor = MTLRenderPipelineDescriptor()
// A string to help identify this object.
pipelineDescriptor.label = "Render Pipeline"
pipelineDescriptor.vertexFunction = vertexFunction
pipelineDescriptor.fragmentFunction = fragmentFunction
// Pixel format of the color attachments texture: BGRA.
pipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormat.bgra8Unorm
pipelineDescriptor.colorAttachments[0].isBlendingEnabled = false
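// With blending disabled, the foreground pass simply replaces the cleared contents of the attachment.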
return try device.makeRenderPipelineState(descriptor: pipelineDescriptor)
}
func buildBackgroundRenderPipelineState(_ vertexFunction: MTLFunction, fragmentFunction: MTLFunction) throws -> MTLRenderPipelineState {
// A render pipeline descriptor describes the configuration of our programmable pipeline.
let pipelineDescriptor = MTLRenderPipelineDescriptor()
// A string to help identify the object.
pipelineDescriptor.label = "Render Pipeline - Blending"
// Provide the vertex and fragment shader functions and the pixel format to be used.
pipelineDescriptor.vertexFunction = vertexFunction
pipelineDescriptor.fragmentFunction = fragmentFunction
// Pixel format of the color attachments texture: BGRA.
pipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormat.bgra8Unorm
/*
Enable blending. The blend descriptor property values are then used to determine how source and
destination color values are combined.
*/
pipelineDescriptor.colorAttachments[0].isBlendingEnabled = true
// Specify custom blend operations to perform the cross dissolve effect.
// Add portions of both source and destination pixel values.
pipelineDescriptor.colorAttachments[0].rgbBlendOperation = .add
pipelineDescriptor.colorAttachments[0].alphaBlendOperation = .add
// Use a source blend factor of one.
pipelineDescriptor.colorAttachments[0].sourceRGBBlendFactor = .one
pipelineDescriptor.colorAttachments[0].sourceAlphaBlendFactor = .one
// Use a destination RGB blend factor of (1 - blend alpha).
pipelineDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusBlendAlpha
// Use a destination alpha blend factor of the blend alpha.
pipelineDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .blendAlpha
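/*
With these settings the fixed-function blend computes, per color channel:
    output.rgb = source.rgb * 1 + destination.rgb * (1 - blendAlpha)
where blendAlpha is supplied per frame via setBlendColor(red:green:blue:alpha:) with the tween
factor, so the previously rendered foreground fades out as the transition progresses.
*/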
return try device.makeRenderPipelineState(descriptor: pipelineDescriptor)
}
func renderTexture(_ renderEncoder: MTLRenderCommandEncoder, texture: MTLTexture,
pipelineState: MTLRenderPipelineState) {
// Set the current render pipeline state object.
renderEncoder.setRenderPipelineState(pipelineState)
// Specify vertex, color and texture buffers for the vertex shader function.
renderEncoder.setVertexBuffer(vertexBuffer, offset: 0, at: 0)
renderEncoder.setVertexBuffer(colorBuffer, offset: 0, at: 1)
renderEncoder.setVertexBuffer(textureCoordBuffer, offset: 0, at: 2)
// Set a texture for the fragment shader function.
renderEncoder.setFragmentTexture(texture, at: 0)
// Tell the render context we want to draw our primitives.
renderEncoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6, instanceCount: 1)
}
override func renderPixelBuffer(_ destinationPixelBuffer: CVPixelBuffer,
usingForegroundSourceBuffer foregroundPixelBuffer: CVPixelBuffer,
andBackgroundSourceBuffer backgroundPixelBuffer: CVPixelBuffer,
forTweenFactor tween: Float) {
// Create an MTLTexture for each of the CVPixelBuffers.
guard let foregroundTexture = buildTextureForPixelBuffer(foregroundPixelBuffer) else { return }
guard let backgroundTexture = buildTextureForPixelBuffer(backgroundPixelBuffer) else { return }
guard let destinationTexture = buildTextureForPixelBuffer(destinationPixelBuffer) else { return }
/*
We must maintain a reference to the pixel buffer until the Metal rendering is complete. This is because the
'buildTextureForPixelBuffer' function above uses CVMetalTextureCacheCreateTextureFromImage to create a
Metal texture (CVMetalTexture) from the IOSurface that backs the CVPixelBuffer, but
CVMetalTextureCacheCreateTextureFromImage doesn't increment the use count of the IOSurface; only the
CVPixelBuffer and the CVMetalTexture own this IOSurface. Therefore we must maintain a reference to either
the pixel buffer or Metal texture until the Metal rendering is done. The MTLCommandBuffer completion
handler below is then used to release these references.
*/
pixelBuffers = RenderPixelBuffers(foregroundPixelBuffer,
backgroundTexture: backgroundPixelBuffer,
destinationTexture: destinationPixelBuffer)
// Create a new command buffer for this render pass into the destination texture.
let commandBuffer = commandQueue.makeCommandBuffer()
commandBuffer.label = "MyCommand"
/*
Set up the render pass descriptor that tells the command encoder to render into the
destination texture created from the destination pixel buffer.
*/
let renderPassDescriptor = setupRenderPassDescriptorForTexture(destinationTexture)
// Create a render command encoder so we can render into something.
let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
renderEncoder.label = "MyRenderEncoder"
guard let foregroundPipelineState = foregroundRenderPipelineState else { return }
// Render foreground texture.
renderTexture(renderEncoder, texture: foregroundTexture,
pipelineState: foregroundPipelineState)
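// The blend color's alpha carries the tween factor into the background pipeline's blend
// factors (.blendAlpha / .oneMinusBlendAlpha), which is what ramps the dissolve over time.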
renderEncoder.setBlendColor(red: 0, green: 0, blue: 0, alpha: tween)
guard let backgroundPipelineState = backgroundRenderPipelineState else { return }
// Render background texture.
renderTexture(renderEncoder, texture: backgroundTexture,
pipelineState: backgroundPipelineState)
// We're done encoding commands.
renderEncoder.endEncoding()
// Use the command buffer completion block to release the reference to the pixel buffers.
commandBuffer.addCompletedHandler({ _ in
self.pixelBuffers = nil // Release the reference to the pixel buffers.
})
// Finalize rendering here & push the command buffer to the GPU.
commandBuffer.commit()
}
}