Skip to content

Commit a217868

Browse files
author
chenliming
committed
Support external input of video or audio; set LFLiveCaptureTypeMask
1 parent 3053a16 commit a217868

File tree

16 files changed

+232
-115
lines changed

16 files changed

+232
-115
lines changed

LFLiveKit/LFLiveSession.h

+34
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,26 @@
1515
#import "LFLiveVideoConfiguration.h"
1616
#import "LFLiveDebug.h"
1717

18+
typedef NS_ENUM(NSInteger,LFLiveCaptureType) {
19+
LFLiveCaptureAudio, //< capture only audio
20+
LFLiveCaptureVideo, //< capture only video
21+
LFLiveInputAudio, //< only audio (External input audio)
22+
LFLiveInputVideo, //< only video (External input video)
23+
};
24+
25+
26+
///< 用来控制采集类型(可以内部采集也可以外部传入等各种组合,支持单音频与单视频,外部输入适用于录屏,无人机等外设介入)
27+
typedef NS_ENUM(NSInteger,LFLiveCaptureTypeMask) {
28+
LFLiveCaptureMaskAudio = (1 << LFLiveCaptureAudio), ///< only inner capture audio (no video)
29+
LFLiveCaptureMaskVideo = (1 << LFLiveCaptureVideo), ///< only inner capture video (no audio)
30+
LFLiveInputMaskAudio = (1 << LFLiveInputAudio), ///< only outer input audio (no video)
31+
LFLiveInputMaskVideo = (1 << LFLiveInputVideo), ///< only outer input video (no audio)
32+
LFLiveCaptureMaskAll = (LFLiveCaptureMaskAudio | LFLiveCaptureMaskVideo), ///< inner capture audio and video
33+
LFLiveInputMaskAll = (LFLiveInputMaskAudio | LFLiveInputMaskVideo), ///< outer input audio and video (see the pushVideo and pushAudio methods)
34+
LFLiveCaptureMaskAudioInputVideo = (LFLiveCaptureMaskAudio | LFLiveInputMaskVideo), ///< inner capture audio and outer input video(method pushVideo and setRunning)
35+
LFLiveCaptureMaskVideoInputAudio = (LFLiveCaptureMaskVideo | LFLiveInputMaskAudio), ///< inner capture video and outer input audio(method pushAudio and setRunning)
36+
LFLiveCaptureDefaultMask = LFLiveCaptureMaskAll ///< default is inner capture audio and video
37+
};
1838

1939
@class LFLiveSession;
2040
@protocol LFLiveSessionDelegate <NSObject>
@@ -78,6 +98,9 @@
7898
/** The status of the stream .*/
7999
@property (nonatomic, assign, readonly) LFLiveState state;
80100

101+
/** The captureType controls inner or outer audio and video. */
102+
@property (nonatomic, assign, readonly) LFLiveCaptureTypeMask captureType;
103+
81104
/** The showDebugInfo controls streamInfo and uploadInfo (updated every 1s). */
82105
@property (nonatomic, assign) BOOL showDebugInfo;
83106

@@ -105,12 +128,23 @@
105128
*/
106129
- (nullable instancetype)initWithAudioConfiguration:(nullable LFLiveAudioConfiguration *)audioConfiguration videoConfiguration:(nullable LFLiveVideoConfiguration *)videoConfiguration NS_DESIGNATED_INITIALIZER;
107130

131+
/**
132+
The designated initializer. Multiple instances with the same configuration will make the
133+
capture unstable.
134+
*/
135+
- (nullable instancetype)initWithAudioConfiguration:(nullable LFLiveAudioConfiguration *)audioConfiguration videoConfiguration:(nullable LFLiveVideoConfiguration *)videoConfiguration captureType:(LFLiveCaptureTypeMask)captureType NS_DESIGNATED_INITIALIZER;
136+
108137
/** The start stream .*/
109138
- (void)startLive:(nonnull LFLiveStreamInfo *)streamInfo;
110139

111140
/** The stop stream .*/
112141
- (void)stopLive;
113142

143+
/** Support outer input of YUV or RGB video (set LFLiveCaptureTypeMask). */
144+
- (void)pushVideo:(CVPixelBufferRef)pixelBuffer;
145+
146+
/** Support outer input of PCM audio (set LFLiveCaptureTypeMask). */
147+
- (void)pushAudio:(AudioBufferList)audioBufferList;
114148

115149
@end
116150

LFLiveKit/LFLiveSession.m

+58-28
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,12 @@
1717
#import "LFGPUImageBeautyFilter.h"
1818
#import "LFH264VideoEncoder.h"
1919

20-
#define LFLiveReportKey @"com.youku.liveSessionReport"
2120

2221
@interface LFLiveSession ()<LFAudioCaptureDelegate, LFVideoCaptureDelegate, LFAudioEncodingDelegate, LFVideoEncodingDelegate, LFStreamSocketDelegate>
23-
{
24-
dispatch_semaphore_t _lock;
25-
}
26-
///音频配置
22+
23+
/// 音频配置
2724
@property (nonatomic, strong) LFLiveAudioConfiguration *audioConfiguration;
28-
///视频配置
25+
/// 视频配置
2926
@property (nonatomic, strong) LFLiveVideoConfiguration *videoConfiguration;
3027
/// 声音采集
3128
@property (nonatomic, strong) LFAudioCapture *audioCaptureSource;
@@ -38,17 +35,21 @@ @interface LFLiveSession ()<LFAudioCaptureDelegate, LFVideoCaptureDelegate, LFAu
3835
/// 上传
3936
@property (nonatomic, strong) id<LFStreamSocket> socket;
4037

38+
4139
#pragma mark -- 内部标识
42-
/// 上报
43-
@property (nonatomic, copy) dispatch_block_t reportBlock;
44-
/// debugInfo
40+
/// 调试信息
4541
@property (nonatomic, strong) LFLiveDebug *debugInfo;
46-
/// streamInfo
42+
/// 流信息
4743
@property (nonatomic, strong) LFLiveStreamInfo *streamInfo;
48-
/// uploading
44+
/// 是否开始上传
4945
@property (nonatomic, assign) BOOL uploading;
50-
/// state
46+
/// 当前状态
5147
@property (nonatomic, assign, readwrite) LFLiveState state;
48+
/// 当前直播type
49+
@property (nonatomic, assign, readwrite) LFLiveCaptureTypeMask captureType;
50+
51+
/// 时间戳锁
52+
@property (nonatomic, strong) dispatch_semaphore_t lock;
5253

5354
@end
5455

@@ -68,20 +69,25 @@ @implementation LFLiveSession
6869

6970
#pragma mark -- LifeCycle
7071
- (instancetype)initWithAudioConfiguration:(LFLiveAudioConfiguration *)audioConfiguration videoConfiguration:(LFLiveVideoConfiguration *)videoConfiguration {
71-
if (!audioConfiguration || !videoConfiguration) @throw [NSException exceptionWithName:@"LFLiveSession init error" reason:@"audioConfiguration or videoConfiguration is nil " userInfo:nil];
72+
return [self initWithAudioConfiguration:audioConfiguration videoConfiguration:videoConfiguration captureType:LFLiveCaptureDefaultMask];
73+
}
74+
75+
- (nullable instancetype)initWithAudioConfiguration:(nullable LFLiveAudioConfiguration *)audioConfiguration videoConfiguration:(nullable LFLiveVideoConfiguration *)videoConfiguration captureType:(LFLiveCaptureTypeMask)captureType{
76+
if((captureType & LFLiveCaptureMaskAudio || captureType & LFLiveInputMaskAudio) && !audioConfiguration) @throw [NSException exceptionWithName:@"LFLiveSession init error" reason:@"audioConfiguration is nil " userInfo:nil];
77+
if((captureType & LFLiveCaptureMaskVideo || captureType & LFLiveInputMaskVideo) && !videoConfiguration) @throw [NSException exceptionWithName:@"LFLiveSession init error" reason:@"videoConfiguration is nil " userInfo:nil];
7278
if (self = [super init]) {
7379
_audioConfiguration = audioConfiguration;
7480
_videoConfiguration = videoConfiguration;
75-
_lock = dispatch_semaphore_create(1);
7681
_adaptiveBitrate = NO;
7782
_isFirstFrame = YES;
83+
_captureType = captureType;
7884
}
7985
return self;
8086
}
8187

8288
- (void)dealloc {
83-
self.audioCaptureSource.running = NO;
8489
self.videoCaptureSource.running = NO;
90+
self.audioCaptureSource.running = NO;
8591
}
8692

8793
#pragma mark -- CustomMethod
@@ -90,6 +96,7 @@ - (void)startLive:(LFLiveStreamInfo *)streamInfo {
9096
_streamInfo = streamInfo;
9197
_streamInfo.videoConfiguration = _videoConfiguration;
9298
_streamInfo.audioConfiguration = _audioConfiguration;
99+
_streamInfo.needDropFrame = (self.captureType & LFLiveCaptureMaskVideo || self.captureType & LFLiveInputMaskVideo) ? YES : NO;//< 有视频执行丢帧算法
93100
[self.socket start];
94101
}
95102

@@ -99,12 +106,24 @@ - (void)stopLive {
99106
self.socket = nil;
100107
}
101108

109+
- (void)pushVideo:(CVPixelBufferRef)pixelBuffer{
110+
if(self.captureType & LFLiveInputMaskVideo){
111+
if (self.uploading) [self.videoEncoder encodeVideoData:pixelBuffer timeStamp:self.currentTimestamp];
112+
}
113+
}
114+
115+
- (void)pushAudio:(AudioBufferList)audioBufferList{
116+
if(self.captureType & LFLiveInputMaskAudio){
117+
if (self.uploading) [self.audioEncoder encodeAudioData:audioBufferList timeStamp:self.currentTimestamp];
118+
}
119+
}
120+
102121
#pragma mark -- CaptureDelegate
103122
- (void)captureOutput:(nullable LFAudioCapture *)capture audioBuffer:(AudioBufferList)inBufferList {
104123
if (self.uploading) [self.audioEncoder encodeAudioData:inBufferList timeStamp:self.currentTimestamp];
105124
}
106125

107-
- (void)captureOutput:(nullable LFVideoCapture *)capture pixelBuffer:(nullable CVImageBufferRef)pixelBuffer {
126+
- (void)captureOutput:(nullable LFVideoCapture *)capture pixelBuffer:(nullable CVPixelBufferRef)pixelBuffer {
108127
if (self.uploading) [self.videoEncoder encodeVideoData:pixelBuffer timeStamp:self.currentTimestamp];
109128
}
110129

@@ -156,18 +175,18 @@ - (void)socketDebug:(nullable id<LFStreamSocket>)socket debugInfo:(nullable LFLi
156175
}
157176

158177
- (void)socketBufferStatus:(nullable id<LFStreamSocket>)socket status:(LFLiveBuffferState)status {
159-
if (self.adaptiveBitrate) {
160-
NSUInteger videoBitRate = [_videoEncoder videoBitRate];
178+
if((self.captureType & LFLiveCaptureMaskVideo || self.captureType & LFLiveInputMaskVideo) && self.adaptiveBitrate){
179+
NSUInteger videoBitRate = [self.videoEncoder videoBitRate];
161180
if (status == LFLiveBuffferDecline) {
162181
if (videoBitRate < _videoConfiguration.videoMaxBitRate) {
163182
videoBitRate = videoBitRate + 50 * 1000;
164-
[_videoEncoder setVideoBitRate:videoBitRate];
183+
[self.videoEncoder setVideoBitRate:videoBitRate];
165184
NSLog(@"Increase bitrate %@", @(videoBitRate));
166185
}
167186
} else {
168-
if (videoBitRate > _videoConfiguration.videoMinBitRate) {
187+
if (videoBitRate > self.videoConfiguration.videoMinBitRate) {
169188
videoBitRate = videoBitRate - 100 * 1000;
170-
[_videoEncoder setVideoBitRate:videoBitRate];
189+
[self.videoEncoder setVideoBitRate:videoBitRate];
171190
NSLog(@"Decline bitrate %@", @(videoBitRate));
172191
}
173192
}
@@ -284,16 +303,20 @@ - (UIView*)warterMarkView{
284303

285304
- (LFAudioCapture *)audioCaptureSource {
286305
if (!_audioCaptureSource) {
287-
_audioCaptureSource = [[LFAudioCapture alloc] initWithAudioConfiguration:_audioConfiguration];
288-
_audioCaptureSource.delegate = self;
306+
if(self.captureType & LFLiveCaptureMaskAudio){
307+
_audioCaptureSource = [[LFAudioCapture alloc] initWithAudioConfiguration:_audioConfiguration];
308+
_audioCaptureSource.delegate = self;
309+
}
289310
}
290311
return _audioCaptureSource;
291312
}
292313

293314
- (LFVideoCapture *)videoCaptureSource {
294315
if (!_videoCaptureSource) {
295-
_videoCaptureSource = [[LFVideoCapture alloc] initWithVideoConfiguration:_videoConfiguration];
296-
_videoCaptureSource.delegate = self;
316+
if(self.captureType & LFLiveCaptureMaskVideo){
317+
_videoCaptureSource = [[LFVideoCapture alloc] initWithVideoConfiguration:_videoConfiguration];
318+
_videoCaptureSource.delegate = self;
319+
}
297320
}
298321
return _videoCaptureSource;
299322
}
@@ -320,7 +343,7 @@ - (LFVideoCapture *)videoCaptureSource {
320343

321344
- (id<LFStreamSocket>)socket {
322345
if (!_socket) {
323-
_socket = [[LFStreamRTMPSocket alloc] initWithStream:self.streamInfo videoSize:self.videoConfiguration.videoSize reconnectInterval:self.reconnectInterval reconnectCount:self.reconnectCount];
346+
_socket = [[LFStreamRTMPSocket alloc] initWithStream:self.streamInfo reconnectInterval:self.reconnectInterval reconnectCount:self.reconnectCount];
324347
[_socket setDelegate:self];
325348
}
326349
return _socket;
@@ -333,8 +356,15 @@ - (LFLiveStreamInfo *)streamInfo {
333356
return _streamInfo;
334357
}
335358

359+
- (dispatch_semaphore_t)lock{
360+
if(!_lock){
361+
_lock = dispatch_semaphore_create(1);
362+
}
363+
return _lock;
364+
}
365+
336366
- (uint64_t)currentTimestamp {
337-
dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
367+
dispatch_semaphore_wait(self.lock, DISPATCH_TIME_FOREVER);
338368
uint64_t currentts = 0;
339369
if (_isFirstFrame) {
340370
_timestamp = NOW;
@@ -343,7 +373,7 @@ - (uint64_t)currentTimestamp {
343373
} else {
344374
currentts = NOW - _timestamp;
345375
}
346-
dispatch_semaphore_signal(_lock);
376+
dispatch_semaphore_signal(self.lock);
347377
return currentts;
348378
}
349379

LFLiveKit/capture/LFVideoCapture.h

+1-1
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
@class LFVideoCapture;
1414
/** LFVideoCapture callback videoData */
1515
@protocol LFVideoCaptureDelegate <NSObject>
16-
- (void)captureOutput:(nullable LFVideoCapture *)capture pixelBuffer:(nullable CVImageBufferRef)pixelBuffer;
16+
- (void)captureOutput:(nullable LFVideoCapture *)capture pixelBuffer:(nullable CVPixelBufferRef)pixelBuffer;
1717
@end
1818

1919
@interface LFVideoCapture : NSObject

LFLiveKit/capture/LFVideoCapture.m

+5-21
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,7 @@ - (GPUImageAlphaBlendFilter *)blendFilter{
233233
- (UIView *)waterMarkContentView{
234234
if(!_waterMarkContentView){
235235
_waterMarkContentView = [UIView new];
236-
_waterMarkContentView.frame = CGRectMake(0, 0, self.gpuImageView.frame.size.width, self.gpuImageView.frame.size.height);
236+
_waterMarkContentView.frame = CGRectMake(0, 0, self.configuration.videoSize.width, self.configuration.videoSize.height);
237237
_waterMarkContentView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
238238
}
239239
return _waterMarkContentView;
@@ -254,14 +254,6 @@ - (void)processVideo:(GPUImageOutput *)output {
254254
@autoreleasepool {
255255
GPUImageFramebuffer *imageFramebuffer = output.framebufferForOutput;
256256
CVPixelBufferRef pixelBuffer = [imageFramebuffer pixelBuffer];
257-
if(!CGSizeEqualToSize(_self.configuration.videoSize, imageFramebuffer.size)){
258-
NSInteger width = ceil(imageFramebuffer.size.width);
259-
NSInteger height = ceil(imageFramebuffer.size.height);
260-
if(width %2 != 0) width = width + 1;
261-
if(height %2 != 0) height = height + 1;
262-
_self.configuration.videoSize = CGSizeMake(width, height);
263-
_self.waterMarkContentView.frame = CGRectMake(0, 0,_self.configuration.videoSize.width, _self.configuration.videoSize.height);
264-
}
265257
if (pixelBuffer && _self.delegate && [_self.delegate respondsToSelector:@selector(captureOutput:pixelBuffer:)]) {
266258
[_self.delegate captureOutput:_self pixelBuffer:pixelBuffer];
267259
}
@@ -309,18 +301,10 @@ - (void)reloadFilter{
309301
[self.output addTarget:self.gpuImageView];
310302
}
311303

312-
//< 输出大小自适应
313-
if(self.configuration.videoSizeRespectingAspectRatio){
314-
[self.filter forceProcessingAtSizeRespectingAspectRatio:self.configuration.videoSize];
315-
[self.output forceProcessingAtSizeRespectingAspectRatio:self.configuration.videoSize];
316-
[self.blendFilter forceProcessingAtSizeRespectingAspectRatio:self.configuration.videoSize];
317-
[self.uiElementInput forceProcessingAtSizeRespectingAspectRatio:self.configuration.videoSize];
318-
}else{
319-
[self.filter forceProcessingAtSize:self.configuration.videoSize];
320-
[self.output forceProcessingAtSize:self.configuration.videoSize];
321-
[self.blendFilter forceProcessingAtSize:self.configuration.videoSize];
322-
[self.uiElementInput forceProcessingAtSize:self.configuration.videoSize];
323-
}
304+
[self.filter forceProcessingAtSize:self.configuration.videoSize];
305+
[self.output forceProcessingAtSize:self.configuration.videoSize];
306+
[self.blendFilter forceProcessingAtSize:self.configuration.videoSize];
307+
[self.uiElementInput forceProcessingAtSize:self.configuration.videoSize];
324308

325309
//< 输出数据
326310
__weak typeof(self) _self = self;

LFLiveKit/coder/LFHardwareVideoEncoder.m

+1-1
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ - (void)dealloc {
9898
}
9999

100100
#pragma mark -- LFVideoEncoder
101-
- (void)encodeVideoData:(CVImageBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
101+
- (void)encodeVideoData:(CVPixelBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
102102
if (_isBackGround) return;
103103

104104
frameCount++;

LFLiveKit/coder/LFVideoEncoding.h

+1-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
/// 编码器抽象的接口
2121
@protocol LFVideoEncoding <NSObject>
2222
@required
23-
- (void)encodeVideoData:(nullable CVImageBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp;
23+
- (void)encodeVideoData:(nullable CVPixelBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp;
2424
- (void)stopEncoder;
2525
@optional
2626
@property (nonatomic, assign) NSInteger videoBitRate;

0 commit comments

Comments
 (0)