Skip to content

Commit 5d43c66

Browse files
committed
AVCustomEdit: Version 3.0, 2017-08-17
Added Swift target which renders the frames using Metal off screen rendering. Signed-off-by: Liu Lantao <liulantao@gmail.com>
1 parent 7f10065 commit 5d43c66

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

48 files changed

+5761
-0
lines changed
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,258 @@
1+
/*
2+
Copyright (C) 2017 Apple Inc. All Rights Reserved.
3+
See LICENSE.txt for this sample’s licensing information
4+
5+
Abstract:
6+
APLCrossDissolveRenderer subclass of APLMetalRenderer, renders the given source buffers to perform a cross
7+
dissolve over the time range of the transition.
8+
*/
9+
10+
import Foundation
11+
import CoreVideo
12+
import MetalKit
13+
14+
/// Renders the given source buffers with Metal to perform a cross dissolve
/// over the time range of the transition: the foreground frame is drawn
/// opaquely, then the background frame is blended on top with a blend color
/// whose alpha is the tween factor.
class APLCrossDissolveRenderer: APLMetalRenderer {

    /// Vertex coordinates used for drawing our geometric primitives
    /// (two triangles forming a full-screen quad in clip space).
    fileprivate let vertexArray: [Float] = [
        -1.0, 1.0, 0, 1,
        -1.0, -1.0, 0, 1,
        1.0, -1.0, 0, 1,
        -1.0, 1.0, 0, 1,
        1.0, -1.0, 0, 1,
        1.0, 1.0, 0, 1
    ]

    /// Texture coordinates used for drawing textures in the texture coordinate system
    /// (one (u, v) pair per vertex above).
    fileprivate let textureCoordsArray: [Float] = [
        0.0, 0.0,
        0.0, 1.0,
        1.0, 1.0,
        0.0, 0.0,
        1.0, 1.0,
        1.0, 0.0
    ]

    /// The colors for each vertex coordinate (RGBA per vertex).
    fileprivate let colorArray: [Float] = [
        1, 0, 0, 1,
        0, 1, 0, 1,
        0, 0, 1, 1,
        1, 0, 0, 1,
        0, 0, 1, 1,
        1, 0, 1, 1
    ]

    /// MTLRenderPipelineState objects that contain compiled rendering state,
    /// including vertex and fragment shaders. The background pipeline enables
    /// blending; the foreground pipeline does not.
    fileprivate var foregroundRenderPipelineState: MTLRenderPipelineState?
    fileprivate var backgroundRenderPipelineState: MTLRenderPipelineState?

    /// MTLBuffer used for vertex data.
    fileprivate var vertexBuffer: MTLBuffer?
    /// MTLBuffer used for texture coordinate data.
    fileprivate var textureCoordBuffer: MTLBuffer?
    /// MTLBuffer used for color data.
    fileprivate var colorBuffer: MTLBuffer?

    /*
     Instance of RenderPixelBuffers to maintain references to pixel buffers until they are no longer
     needed.
     */
    fileprivate var pixelBuffers: RenderPixelBuffers?

    /// Fails (returns nil) if the default shader library, either shader
    /// function, or either pipeline state cannot be created.
    override init?() {

        super.init()

        // The default library contains all of the shader functions that were compiled into our app bundle.
        guard let library = device.newDefaultLibrary() else { return nil }

        // Retrieve the functions that will comprise our pipeline.

        // Load the vertex program into the library.
        guard let vertexFunc = library.makeFunction(name: "passthroughVertexShader") else { return nil }

        // Load the fragment program into the library.
        guard let fragmentFunc = library.makeFunction(name: "texturedQuadFragmentShader") else { return nil }

        // Upload the static geometry, texture coordinates and colors to GPU-visible buffers.
        vertexBuffer =
            device.makeBuffer(bytes: vertexArray,
                              length: vertexArray.count * MemoryLayout.size(ofValue: vertexArray[0]),
                              options: .storageModeShared)

        textureCoordBuffer =
            device.makeBuffer(bytes: textureCoordsArray,
                              length: textureCoordsArray.count * MemoryLayout.size(ofValue: textureCoordsArray[0]),
                              options: .storageModeShared)

        colorBuffer =
            device.makeBuffer(bytes: colorArray,
                              length: colorArray.count * MemoryLayout.size(ofValue: colorArray[0]),
                              options: .storageModeShared)

        // Compile the functions and other state into a pipeline object.
        do {
            foregroundRenderPipelineState =
                try buildForegroundRenderPipelineState(vertexFunc, fragmentFunction: fragmentFunc)

            backgroundRenderPipelineState =
                try buildBackgroundRenderPipelineState(vertexFunc, fragmentFunction: fragmentFunc)
        } catch {
            print("Unable to compile render pipeline state due to error:\(error)")
            return nil
        }
    }

    /// Builds a render pass descriptor whose single color attachment renders
    /// into `texture`, clearing it to opaque black first and storing the result.
    func setupRenderPassDescriptorForTexture(_ texture: MTLTexture) -> MTLRenderPassDescriptor {

        /*
         MTLRenderPassDescriptor contains attachments that are the rendering destination for pixels
         generated by a rendering pass.
         */
        let renderPassDescriptor = MTLRenderPassDescriptor()

        // Associate the texture object with the attachment.
        renderPassDescriptor.colorAttachments[0].texture = texture
        // Set color to use when the color attachment is cleared.
        renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 1.0)
        renderPassDescriptor.colorAttachments[0].loadAction = .clear
        renderPassDescriptor.colorAttachments[0].storeAction = .store

        return renderPassDescriptor
    }

    /// Builds the opaque (non-blending) pipeline used to draw the foreground frame.
    /// - Throws: Rethrows any pipeline-compilation error from `makeRenderPipelineState`.
    func buildForegroundRenderPipelineState(_ vertexFunction: MTLFunction, fragmentFunction: MTLFunction) throws -> MTLRenderPipelineState {

        // A MTLRenderPipelineDescriptor object that describes the attributes of the render pipeline state.
        let pipelineDescriptor = MTLRenderPipelineDescriptor()

        // A string to help identify this object.
        pipelineDescriptor.label = "Render Pipeline"
        pipelineDescriptor.vertexFunction = vertexFunction
        pipelineDescriptor.fragmentFunction = fragmentFunction
        // Pixel format of the color attachments texture: BGRA.
        pipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormat.bgra8Unorm
        // Foreground is drawn first and fully opaque, so no blending.
        pipelineDescriptor.colorAttachments[0].isBlendingEnabled = false

        return try device.makeRenderPipelineState(descriptor: pipelineDescriptor)
    }

    /// Builds the blending pipeline used to composite the background frame over the
    /// already-drawn foreground using the current blend color's alpha (the tween factor).
    /// - Throws: Rethrows any pipeline-compilation error from `makeRenderPipelineState`.
    func buildBackgroundRenderPipelineState(_ vertexFunction: MTLFunction, fragmentFunction: MTLFunction) throws -> MTLRenderPipelineState {

        // A render pipeline descriptor describes the configuration of our programmable pipeline.
        let pipelineDescriptor = MTLRenderPipelineDescriptor()

        // A string to help identify the object.
        pipelineDescriptor.label = "Render Pipeline - Blending"
        // Provide the vertex and shader function and the pixel format to be used.
        pipelineDescriptor.vertexFunction = vertexFunction
        pipelineDescriptor.fragmentFunction = fragmentFunction
        // Pixel format of the color attachments texture: BGRA.
        pipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormat.bgra8Unorm

        /*
         Enable blending. The blend descriptor property values are then used to determine how source and
         destination color values are combined.
         */
        pipelineDescriptor.colorAttachments[0].isBlendingEnabled = true

        // Specify custom blend operations to perform the cross dissolve effect.

        // Add portions of both source and destination pixel values.
        pipelineDescriptor.colorAttachments[0].rgbBlendOperation = .add
        pipelineDescriptor.colorAttachments[0].alphaBlendOperation = .add
        // Use Blend factor of one.
        pipelineDescriptor.colorAttachments[0].sourceRGBBlendFactor = .one
        pipelineDescriptor.colorAttachments[0].sourceAlphaBlendFactor = .one
        // Blend factor of 1 - alpha value, where alpha comes from setBlendColor (the tween factor).
        pipelineDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusBlendAlpha
        // Blend factor of alpha.
        pipelineDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .blendAlpha

        return try device.makeRenderPipelineState(descriptor: pipelineDescriptor)
    }

    /// Encodes one textured-quad draw of `texture` into `renderEncoder` using `pipelineState`.
    func renderTexture(_ renderEncoder: MTLRenderCommandEncoder, texture: MTLTexture,
                       pipelineState: MTLRenderPipelineState) {

        // Set the current render pipeline state object.
        renderEncoder.setRenderPipelineState(pipelineState)

        // Specify vertex, color and texture buffers for the vertex shader function.
        renderEncoder.setVertexBuffer(vertexBuffer, offset: 0, at: 0)
        renderEncoder.setVertexBuffer(colorBuffer, offset: 0, at: 1)
        renderEncoder.setVertexBuffer(textureCoordBuffer, offset: 0, at: 2)

        // Set a texture for the fragment shader function.
        renderEncoder.setFragmentTexture(texture, at: 0)

        // Tell the render context we want to draw our primitives (two triangles = one quad).
        renderEncoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6, instanceCount: 1)
    }

    /// Renders the cross dissolve of `foregroundPixelBuffer` over `backgroundPixelBuffer`
    /// into `destinationPixelBuffer`, with `tween` (0...1) as the dissolve progress.
    override func renderPixelBuffer(_ destinationPixelBuffer: CVPixelBuffer,
                                    usingForegroundSourceBuffer foregroundPixelBuffer: CVPixelBuffer,
                                    andBackgroundSourceBuffer backgroundPixelBuffer: CVPixelBuffer,
                                    forTweenFactor tween: Float) {

        /*
         Validate both pipeline states up front. In the previous ordering these guards ran after
         the render command encoder had been created; returning at that point abandoned a command
         buffer with an open encoder (endEncoding()/commit() never called) and left `pixelBuffers`
         retaining its references until the next render. Checking first avoids both problems.
         */
        guard let foregroundPipelineState = foregroundRenderPipelineState,
            let backgroundPipelineState = backgroundRenderPipelineState else { return }

        // Create a MTLTexture from each CVPixelBuffer.
        guard let foregroundTexture = buildTextureForPixelBuffer(foregroundPixelBuffer) else { return }
        guard let backgroundTexture = buildTextureForPixelBuffer(backgroundPixelBuffer) else { return }
        guard let destinationTexture = buildTextureForPixelBuffer(destinationPixelBuffer) else { return }

        /*
         We must maintain a reference to the pixel buffer until the Metal rendering is complete. This is because the
         'buildTextureForPixelBuffer' function above uses CVMetalTextureCacheCreateTextureFromImage to create a
         Metal texture (CVMetalTexture) from the IOSurface that backs the CVPixelBuffer, but
         CVMetalTextureCacheCreateTextureFromImage doesn't increment the use count of the IOSurface; only the
         CVPixelBuffer, and the CVMTLTexture own this IOSurface. Therefore we must maintain a reference to either
         the pixel buffer or Metal texture until the Metal rendering is done. The MTLCommandBuffer completion
         handler below is then used to release these references.
         */
        pixelBuffers = RenderPixelBuffers(foregroundPixelBuffer,
                                          backgroundTexture: backgroundPixelBuffer,
                                          destinationTexture: destinationPixelBuffer)

        // Create a new command buffer for each renderpass to the current drawable.
        let commandBuffer = commandQueue.makeCommandBuffer()
        commandBuffer.label = "MyCommand"

        /*
         Obtain a drawable texture for this render pass and set up the renderpass
         descriptor for the command encoder to render into.
         */
        let renderPassDescriptor = setupRenderPassDescriptorForTexture(destinationTexture)

        // Create a render command encoder so we can render into something.
        let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
        renderEncoder.label = "MyRenderEncoder"

        // Render foreground texture (opaque pass).
        renderTexture(renderEncoder, texture: foregroundTexture,
                      pipelineState: foregroundPipelineState)

        // The blend color's alpha drives the dissolve amount in the background pipeline's blend factors.
        renderEncoder.setBlendColor(red: 0, green: 0, blue: 0, alpha: tween)

        // Render background texture (blended pass).
        renderTexture(renderEncoder, texture: backgroundTexture,
                      pipelineState: backgroundPipelineState)

        // We're done encoding commands.
        renderEncoder.endEncoding()

        // Use the command buffer completion block to release the reference to the pixel buffers.
        commandBuffer.addCompletedHandler({ _ in
            self.pixelBuffers = nil // Release the reference to the pixel buffers.
        })

        // Finalize rendering here & push the command buffer to the GPU.
        commandBuffer.commit()
    }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,124 @@
1+
/*
2+
Copyright (C) 2017 Apple Inc. All Rights Reserved.
3+
See LICENSE.txt for this sample’s licensing information
4+
5+
Abstract:
6+
Custom video composition instruction class implementing AVVideoCompositionInstruction protocol.
7+
*/
8+
9+
import AVFoundation
10+
import Foundation
11+
import CoreMedia
12+
13+
/// Custom video composition instruction implementing AVVideoCompositionInstructionProtocol.
/// Protocol requirements are backed by mutable `override*` stored properties so that
/// subclasses and owners can configure the instruction after creation.
class APLCustomVideoCompositionInstruction: NSObject, AVVideoCompositionInstructionProtocol {

    /// ID used by subclasses to identify the foreground frame.
    var foregroundTrackID: CMPersistentTrackID = kCMPersistentTrackID_Invalid
    /// ID used by subclasses to identify the background frame.
    var backgroundTrackID: CMPersistentTrackID = kCMPersistentTrackID_Invalid

    /// Backing storage for `timeRange`.
    var overrideTimeRange: CMTimeRange = CMTimeRange()
    /// Backing storage for `enablePostProcessing`.
    var overrideEnablePostProcessing = false

    /// Backing storage for `containsTweening`.
    var overrideContainsTweening = false
    /// Backing storage for `requiredSourceTrackIDs`.
    var overrideRequiredSourceTrackIDs: [NSValue]?
    /// Backing storage for `passthroughTrackID`.
    var overridePassthroughTrackID: CMPersistentTrackID = 0

    /*
     If, for the duration of the instruction, the video composition result is one of the source
     frames, this property returns the corresponding track ID. The compositor is not run for
     that duration and the matching source frame is used instead.
     */
    var passthroughTrackID: CMPersistentTrackID {
        get { return overridePassthroughTrackID }
        set { overridePassthroughTrackID = newValue }
    }

    /*
     List of video track IDs required to compose frames for this instruction. A nil value
     means every source track is considered required for composition.
     */
    var requiredSourceTrackIDs: [NSValue]? {
        get { return overrideRequiredSourceTrackIDs }
        set { overrideRequiredSourceTrackIDs = newValue }
    }

    // The timeRange during which this instruction is effective.
    var timeRange: CMTimeRange {
        get { return overrideTimeRange }
        set { overrideTimeRange = newValue }
    }

    // When false, post-processing is skipped for the duration of this instruction.
    var enablePostProcessing: Bool {
        get { return overrideEnablePostProcessing }
        set { overrideEnablePostProcessing = newValue }
    }

    /*
     When true, rendering a frame from the same source buffers and the same composition
     instruction at two different composition times may yield different output frames.
     When false, two such renders yield the same frame, and the media pipeline may be able
     to skip some duplicate processing.
     */
    var containsTweening: Bool {
        get { return overrideContainsTweening }
        set { overrideContainsTweening = newValue }
    }

    /// Creates a passthrough instruction: a single source track is shown as-is over `theTimeRange`.
    init(thePassthroughTrackID: CMPersistentTrackID, forTimeRange theTimeRange: CMTimeRange) {
        super.init()

        timeRange = theTimeRange
        passthroughTrackID = thePassthroughTrackID

        // A passthrough never tweens and needs no post-processing.
        requiredSourceTrackIDs = [NSValue]()
        enablePostProcessing = false
        containsTweening = false
    }

    /// Creates a compositing instruction over `theTimeRange` that requires `theSourceTrackIDs`.
    init(theSourceTrackIDs: [NSValue], forTimeRange theTimeRange: CMTimeRange) {
        super.init()

        timeRange = theTimeRange
        requiredSourceTrackIDs = theSourceTrackIDs

        // No passthrough track; output varies with composition time (e.g. a dissolve).
        passthroughTrackID = kCMPersistentTrackID_Invalid
        enablePostProcessing = false
        containsTweening = true
    }

}

0 commit comments

Comments
 (0)