This section describes how to implement the core features of an interactive live streaming scenario on iOS.
The interactive live streaming feature requires integrating the RTC SDK. Please submit a ticket to contact technical support for instructions on integrating the compatible SDK version.
Note
See RTC Native SDK API for the API reference of the corresponding SDK version.
The following are the sequence diagrams and reference integration code for implementing the core features on the host side.
The host starts live streaming by pushing the stream through the RTC engine and the live pusher engine.
Sequence Diagram
Sample Code
// Initialize the ByteRTCVideo object
self.rtcVideo = [ByteRTCVideo createRTCVideo:self.appId delegate:self parameters:@{}];

// Set the local preview view
ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
canvasView.view = view;
canvasView.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setLocalVideoCanvas:ByteRTCStreamIndexMain withCanvas:canvasView];

// Set video encoding parameters
ByteRTCVideoEncoderConfig *solution = [[ByteRTCVideoEncoderConfig alloc] init];
solution.width = self.config.captureWidth;
solution.height = self.config.captureHeight;
solution.frameRate = self.config.captureFps;
solution.maxBitrate = self.config.videoEncoderKBitrate;
[self.rtcVideo setMaxVideoEncoderConfig:solution];
// Subscribe to local video data
[self.rtcVideo setLocalVideoSink:ByteRTCStreamIndexMain withSink:self withPixelFormat:(ByteRTCVideoSinkPixelFormatBGRA)];

// Subscribe to local audio data
ByteRTCAudioFormat *audioFormat = [[ByteRTCAudioFormat alloc] init];
audioFormat.channel = ByteRTCAudioChannelStereo;
audioFormat.sampleRate = ByteRTCAudioSampleRate44100;
[self.rtcVideo enableAudioFrameCallback:(ByteRTCAudioFrameCallbackRecord) format:audioFormat];
[self.rtcVideo registerAudioFrameObserver:self];
// Create the live pusher engine
self.livePusher = [[VeLivePusher alloc] initWithConfig:[[VeLivePusherConfiguration alloc] init]];

// Configure push parameters
// Video encoder configuration
VeLiveVideoEncoderConfiguration *videoEncodeCfg = [[VeLiveVideoEncoderConfiguration alloc] init];
// Initial video encoding bitrate
videoEncodeCfg.bitrate = self.config.videoEncoderKBitrate;
// Minimum video encoding bitrate
videoEncodeCfg.minBitrate = self.config.videoEncoderKBitrate;
// Maximum video encoding bitrate
videoEncodeCfg.maxBitrate = self.config.videoEncoderKBitrate;
// Frame rate
videoEncodeCfg.fps = self.config.videoEncoderFps;
// Apply the encoder configuration
[self.livePusher setVideoEncoderConfiguration:videoEncodeCfg];

// Enable external video capture
[self.livePusher startVideoCapture:(VeLiveVideoCaptureExternal)];
// Enable external audio capture
[self.livePusher startAudioCapture:(VeLiveAudioCaptureExternal)];
// Start video capture
[self.rtcVideo startVideoCapture];
// Start audio capture
[self.rtcVideo startAudioCapture];
// Start pushing the stream with the live pusher engine
[self.livePusher startPush:url];
// Video capture callback: send video frames to the live pusher engine
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer
                 rotation:(ByteRTCVideoRotation)rotation
              contentType:(ByteRTCVideoContentType)contentType
             extendedData:(NSData *)extendedData {
    CMTime pts = CMTimeMakeWithSeconds(CACurrentMediaTime(), 1000000000);
    VeLiveVideoFrame *videoFrame = [[VeLiveVideoFrame alloc] init];
    videoFrame.pts = pts;
    videoFrame.pixelBuffer = pixelBuffer;
    VeLiveVideoRotation videoRotation = VeLiveVideoRotation0;
    switch (rotation) {
        case ByteRTCVideoRotation0:
            videoRotation = VeLiveVideoRotation0;
            break;
        case ByteRTCVideoRotation90:
            videoRotation = VeLiveVideoRotation90;
            break;
        case ByteRTCVideoRotation180:
            videoRotation = VeLiveVideoRotation180;
            break;
        case ByteRTCVideoRotation270:
            videoRotation = VeLiveVideoRotation270;
            break;
        default:
            break;
    }
    videoFrame.rotation = videoRotation;
    videoFrame.bufferType = VeLiveVideoBufferTypePixelBuffer;
    [self.livePusher pushExternalVideoFrame:videoFrame];
}

// Audio capture callback: send audio frames to the live pusher engine
- (void)onRecordAudioFrame:(ByteRTCAudioFrame * _Nonnull)audioFrame {
    int channel = 2;
    if (audioFrame.channel == ByteRTCAudioChannelMono) {
        channel = 1;
    } else if (audioFrame.channel == ByteRTCAudioChannelStereo) {
        channel = 2;
    }
    CMTime pts = CMTimeMakeWithSeconds(CACurrentMediaTime(), 1000000000);
    VeLiveAudioFrame *frame = [[VeLiveAudioFrame alloc] init];
    frame.bufferType = VeLiveAudioBufferTypeNSData;
    frame.data = audioFrame.buffer;
    frame.pts = pts;
    frame.channels = (VeLiveAudioChannel)channel;
    frame.sampleRate = VeLiveAudioSampleRate44100;
    [self.livePusher pushExternalAudioFrame:frame];
}
Beauty effects on the host side are integrated through the RTC engine. For usage instructions, see the 美颜特效(CV) documentation.
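As a rough illustration only, the sketch below shows how a CV effect pipeline is typically attached to the RTC engine. The ByteRTCVideoEffect interface and the method names used here (getVideoEffectInterface, initCVResource:withAlgoModelDir:, enableVideoEffect, setEffectNodes:, updateEffectNode:key:value:) are assumptions based on recent SDK versions, and the resource paths and effect key are placeholders; confirm the exact API against the 美颜特效(CV) documentation and your SDK version.

// A minimal sketch, assuming a ByteRTCVideoEffect-style interface; paths and effect key are placeholders.
NSString *licensePath = @"path/to/cv/license.bag";           // Placeholder: your CV license file
NSString *modelDirPath = @"path/to/cv/ModelResource.bundle"; // Placeholder: your algorithm model directory
NSString *beautyNodePath = @"path/to/cv/beauty_node";        // Placeholder: your beauty composer node

ByteRTCVideoEffect *effect = [self.rtcVideo getVideoEffectInterface];
// Initialize CV resources (license + algorithm models)
if ([effect initCVResource:licensePath withAlgoModelDir:modelDirPath] == 0) {
    // Enable the effect pipeline
    [effect enableVideoEffect];
    // Apply the beauty composer node
    [effect setEffectNodes:@[beautyNodePath]];
    // Adjust the intensity of one effect key (key name is a placeholder)
    [effect updateEffectNode:beautyNodePath key:@"whiten" value:0.5];
}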
The host stops pushing the stream through the live pusher engine, joins a room via the RTC engine to start co-hosting, and enables RTC server-side stream mixing and relay to CDN.
Sequence Diagram
Sample Code
// Stop pushing the stream with the live pusher engine
[self.livePusher stopPush];
// Create the RTC room
self.rtcRoom = [self.rtcVideo createRTCRoom:self.roomId];
self.rtcRoom.delegate = self;

// Set user information
ByteRTCUserInfo *userInfo = [[ByteRTCUserInfo alloc] init];
userInfo.userId = self.userId;

ByteRTCRoomConfig *config = [ByteRTCRoomConfig new];
config.isAutoPublish = YES;
config.isAutoSubscribeAudio = YES;
config.isAutoSubscribeVideo = YES;

// Join the room. The token is requested from your business server.
[self.rtcRoom joinRoom:token userInfo:userInfo roomConfig:config];
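The token passed to joinRoom above is issued by your business server. The following is a minimal sketch of fetching it over HTTPS with NSURLSession; the endpoint URL and the response shape ({"token": "..."}) are hypothetical and should be replaced with your own service, and userInfo / config are the objects built above.

// A minimal sketch, assuming a hypothetical endpoint that returns {"token": "..."}.
NSURLComponents *components = [NSURLComponents componentsWithString:@"https://your.business.server/rtc/token"]; // Hypothetical URL
components.queryItems = @[[NSURLQueryItem queryItemWithName:@"roomId" value:self.roomId],
                          [NSURLQueryItem queryItemWithName:@"userId" value:self.userId]];
NSURLSessionDataTask *task = [[NSURLSession sharedSession] dataTaskWithURL:components.URL
                                                         completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
    if (error != nil || data == nil) {
        return; // Handle the request failure in your app
    }
    NSDictionary *json = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
    NSString *token = json[@"token"];
    dispatch_async(dispatch_get_main_queue(), ^{
        // Join the room once the token arrives (userInfo and config are built as shown above)
        [self.rtcRoom joinRoom:token userInfo:userInfo roomConfig:config];
    });
}];
[task resume];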
// Callback: joined the room successfully
- (void)rtcRoom:(ByteRTCRoom *_Nonnull)rtcRoom
    onRoomStateChanged:(NSString *_Nonnull)roomId
               withUid:(nonnull NSString *)uid
                 state:(NSInteger)state
             extraInfo:(NSString *_Nonnull)extraInfo {
    // Create the RTC server-side stream mixing configuration
    self.mixStreamConfig = [ByteRTCMixedStreamConfig defaultMixedStreamConfig];
    self.mixStreamConfig.roomID = self.roomId;
    self.mixStreamConfig.userID = self.userId;

    // Video encoding parameters. Keep them consistent with the live pusher's video encoding parameters.
    self.mixStreamConfig.videoConfig.width = self.config.videoEncoderWith;
    self.mixStreamConfig.videoConfig.height = self.config.videoEncoderHeight;
    self.mixStreamConfig.videoConfig.fps = self.config.videoEncoderFps;
    self.mixStreamConfig.videoConfig.bitrate = self.config.videoEncoderKBitrate;

    // Audio encoding parameters. Keep them consistent with the live pusher's audio encoding parameters.
    self.mixStreamConfig.audioConfig = [[ByteRTCMixedStreamAudioConfig alloc] init];
    self.mixStreamConfig.audioConfig.sampleRate = self.config.audioEncoderSampleRate;
    self.mixStreamConfig.audioConfig.channels = self.config.audioEncoderChannel;
    self.mixStreamConfig.audioConfig.bitrate = self.config.audioEncoderKBitrate;

    // Push URL: the host's RTMP push URL
    self.mixStreamConfig.pushURL = self.streamUrl;
    // Mix the streams on the server
    self.mixStreamConfig.expectedMixingType = ByteRTCMixedStreamByServer;

    // Initialize the layout
    ByteRTCMixedStreamLayoutConfig *layout = [[ByteRTCMixedStreamLayoutConfig alloc] init];
    // Background color
    layout.backgroundColor = @"#000000"; // For reference only

    NSMutableArray *regions = [[NSMutableArray alloc] initWithCapacity:6];
    // Layout region for the host
    ByteRTCMixedStreamLayoutRegionConfig *region = [[ByteRTCMixedStreamLayoutRegionConfig alloc] init];
    region.userID = self.userId; // Host user ID
    region.roomID = self.roomId;
    region.isLocalUser = YES;
    region.renderMode = ByteRTCMixedStreamRenderModeHidden;
    region.locationX = 0.0;         // For reference only
    region.locationY = 0.0;         // For reference only
    region.widthProportion = 0.5;   // For reference only
    region.heightProportion = 0.5;  // For reference only
    region.zOrder = 0;              // For reference only
    region.alpha = 1.0;             // For reference only
    [regions addObject:region];
    layout.regions = regions;

    // Apply the mixing layout
    self.mixStreamConfig.layoutConfig = layout;
    // Set the mixing task ID
    self.rtcTaskId = @"";
    // Start RTC server-side stream mixing and relay to CDN
    [self.rtcVideo startPushMixedStreamToCDN:self.rtcTaskId
                                 mixedConfig:self.mixStreamConfig
                                    observer:self];
}
// Callback: a remote user published a stream. Set up the co-host's view and update the mixing layout.
- (void)rtcRoom:(ByteRTCRoom *)rtcRoom onUserPublishStream:(NSString *)userId type:(ByteRTCMediaStreamType)type {
    if (type == ByteRTCMediaStreamTypeVideo || type == ByteRTCMediaStreamTypeBoth) {
        // Set the co-host user's view
        ByteRTCVideoCanvas *canvas = [[ByteRTCVideoCanvas alloc] init];
        canvas.renderMode = ByteRTCRenderModeHidden;
        canvas.view = view; // The UIView that renders the co-host's video
        canvas.view.backgroundColor = [UIColor clearColor];
        ByteRTCRemoteStreamKey *streamKey = [[ByteRTCRemoteStreamKey alloc] init];
        streamKey.userId = userId;
        streamKey.streamIndex = ByteRTCStreamIndexMain;
        streamKey.roomId = self.roomId;
        [self.rtcVideo setRemoteVideoCanvas:streamKey withCanvas:canvas];
    }

    // Update the mixing layout
    ByteRTCMixedStreamLayoutConfig *layout = [[ByteRTCMixedStreamLayoutConfig alloc] init];
    NSMutableArray *regions = [[NSMutableArray alloc] initWithCapacity:6];

    // Layout region for the host
    ByteRTCMixedStreamLayoutRegionConfig *region = [[ByteRTCMixedStreamLayoutRegionConfig alloc] init];
    region.userID = self.userId; // Host user ID
    region.roomID = self.roomId;
    region.isLocalUser = YES;
    region.renderMode = ByteRTCMixedStreamRenderModeHidden;
    region.locationX = 0.0;         // For reference only
    region.locationY = 0.0;         // For reference only
    region.widthProportion = 0.5;   // For reference only
    region.heightProportion = 0.5;  // For reference only
    region.zOrder = 0;              // For reference only
    region.alpha = 1.0;             // For reference only
    [regions addObject:region];

    // Layout region for the co-host
    ByteRTCMixedStreamLayoutRegionConfig *regionRemote = [[ByteRTCMixedStreamLayoutRegionConfig alloc] init];
    regionRemote.userID = userId; // Co-host user ID
    regionRemote.roomID = self.roomId;
    regionRemote.isLocalUser = NO;
    regionRemote.renderMode = ByteRTCMixedStreamRenderModeHidden;
    regionRemote.locationX = 0.5;        // For reference only
    regionRemote.locationY = 0.0;        // For reference only
    regionRemote.widthProportion = 0.5;  // For reference only
    regionRemote.heightProportion = 0.5; // For reference only
    regionRemote.zOrder = 1;             // For reference only
    regionRemote.alpha = 1.0;            // For reference only
    [regions addObject:regionRemote];

    layout.regions = regions;

    // Update the mixing configuration
    self.mixStreamConfig.layoutConfig = layout;
    [self.rtcVideo updatePushMixedStreamToCDN:self.rtcTaskId mixedConfig:self.mixStreamConfig];
}

// Callback: a remote user stopped publishing. Remove the co-host's view and restore the single-host layout.
- (void)manager:(VeLiveAnchorManager *)manager
    onUserUnPublishStream:(NSString *)uid
                     type:(ByteRTCMediaStreamType)streamType
                   reason:(ByteRTCStreamRemoveReason)reason {
    if (streamType == ByteRTCMediaStreamTypeVideo || streamType == ByteRTCMediaStreamTypeBoth) {
        // Remove the co-host user's view
        ByteRTCVideoCanvas *canvas = [[ByteRTCVideoCanvas alloc] init];
        canvas.renderMode = ByteRTCRenderModeHidden;
        canvas.view = nil;
        ByteRTCRemoteStreamKey *streamKey = [[ByteRTCRemoteStreamKey alloc] init];
        streamKey.userId = uid;
        streamKey.streamIndex = ByteRTCStreamIndexMain;
        streamKey.roomId = self.roomId;
        [self.rtcVideo setRemoteVideoCanvas:streamKey withCanvas:canvas];
    }

    // Update the mixing layout, keeping only the host's region
    ByteRTCMixedStreamLayoutConfig *layout = [[ByteRTCMixedStreamLayoutConfig alloc] init];
    NSMutableArray *regions = [[NSMutableArray alloc] initWithCapacity:6];
    ByteRTCMixedStreamLayoutRegionConfig *region = [[ByteRTCMixedStreamLayoutRegionConfig alloc] init];
    region.userID = self.userId; // Host user ID
    region.roomID = self.roomId;
    region.isLocalUser = YES;
    region.renderMode = ByteRTCMixedStreamRenderModeHidden;
    region.locationX = 0.0;         // For reference only
    region.locationY = 0.0;         // For reference only
    region.widthProportion = 0.5;   // For reference only
    region.heightProportion = 0.5;  // For reference only
    region.zOrder = 0;              // For reference only
    region.alpha = 1.0;             // For reference only
    [regions addObject:region];
    layout.regions = regions;

    // Update the mixing configuration
    self.mixStreamConfig.layoutConfig = layout;
    [self.rtcVideo updatePushMixedStreamToCDN:self.rtcTaskId mixedConfig:self.mixStreamConfig];
}
The host stops RTC server-side stream mixing and relay, leaves the RTC room, and resumes pushing the stream through the live pusher engine.
Sequence Diagram
Sample Code
// Stop RTC server-side stream mixing and relay to CDN
[self.rtcVideo stopPushStreamToCDN:self.rtcTaskId];

// Leave the RTC room
[self.rtcRoom leaveRoom];

// Remove the co-host user's view
ByteRTCVideoCanvas *canvas = [[ByteRTCVideoCanvas alloc] init];
canvas.renderMode = ByteRTCRenderModeHidden;
canvas.view = nil;
ByteRTCRemoteStreamKey *streamKey = [[ByteRTCRemoteStreamKey alloc] init];
streamKey.userId = uid; // The co-host's user ID
streamKey.streamIndex = ByteRTCStreamIndexMain;
streamKey.roomId = self.roomId;
[self.rtcVideo setRemoteVideoCanvas:streamKey withCanvas:canvas];
// Resume pushing the stream with the live pusher engine
[self.livePusher startPush:url];
The host stops the live stream and destroys the RTC engine and the live pusher engine.
Sequence Diagram
Sample Code
// Stop pushing the stream with the live pusher engine
[self.livePusher stopPush];
// Destroy the live pusher engine
[self.livePusher destroy];
self.livePusher = nil;
// Stop RTC video capture
[self.rtcVideo stopVideoCapture];
// Stop RTC audio capture
[self.rtcVideo stopAudioCapture];

// Remove the RTC local preview view
ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
canvasView.view = nil;
canvasView.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setLocalVideoCanvas:ByteRTCStreamIndexMain withCanvas:canvasView];
// Destroy the RTC room
[self.rtcRoom destroy];
self.rtcRoom = nil;

// Destroy the RTC video engine
[ByteRTCVideo destroyRTCVideo];
self.rtcVideo = nil;
The following are the sequence diagrams and reference integration code for implementing the core features on the audience side.
The audience watches the live stream by pulling the stream with the player.
Sequence Diagram
Sample Code
// Create the player
TVLManager *livePlayer = [[TVLManager alloc] init];
self.livePlayer = livePlayer;

// Set the player observer
[self.livePlayer setObserver:self];

// Configure the player
VeLivePlayerConfiguration *config = [[VeLivePlayerConfiguration alloc] init];
config.enableStatisticsCallback = YES;
config.enableLiveDNS = YES;
config.enableSei = YES;
[self.livePlayer setConfig:config];
// Set the player view
self.livePlayer.playerView.frame = UIScreen.mainScreen.bounds;
[self.view addSubview:self.livePlayer.playerView];

// Set the playback URL
[self.livePlayer setPlayUrl:LIVE_PULL_URL];

// Start playback
[self.livePlayer play];
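The configuration above enables SEI (config.enableSei = YES), but the section does not show how SEI messages are consumed. The sketch below assumes the player observer exposes an SEI callback named onReceiveSeiMessage:message:; that selector is an assumption to verify against the VeLivePlayerObserver protocol of your SDK version. In co-hosting scenarios, SEI carried in the mixed stream is commonly used to let the audience UI react when co-hosting starts or ends.

// A minimal sketch; the exact observer selector may differ in your SDK version.
- (void)onReceiveSeiMessage:(TVLManager *)player message:(NSString *)message {
    // SEI carried in the stream, e.g. app-defined layout info injected during co-hosting
    NSData *data = [message dataUsingEncoding:NSUTF8StringEncoding];
    if (data == nil) {
        return;
    }
    NSDictionary *payload = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
    // Update the audience UI based on your app-defined payload, e.g. show or hide co-host placeholders
    NSLog(@"Received SEI payload: %@", payload);
}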
An audience member who wants to co-host stops pulling and playing the stream with the player, then joins the co-hosting session through the RTC engine.
Sequence Diagram
Sample Code
// Stop playback
[self.livePlayer stop];
// Hide the player view
self.livePlayer.playerView.hidden = YES;
// Initialize the ByteRTCVideo object
self.rtcVideo = [ByteRTCVideo createRTCVideo:self.appId delegate:self parameters:@{}];

// Set the local view
ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
canvasView.view = view;
canvasView.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setLocalVideoCanvas:ByteRTCStreamIndexMain withCanvas:canvasView];

// Set video encoding parameters
ByteRTCVideoEncoderConfig *solution = [[ByteRTCVideoEncoderConfig alloc] init];
solution.width = self.config.captureWidth;
solution.height = self.config.captureHeight;
solution.frameRate = self.config.captureFps;
solution.maxBitrate = self.config.videoEncoderKBitrate;
[self.rtcVideo setMaxVideoEncoderConfig:solution];
// Start video capture
[self.rtcVideo startVideoCapture];
// Start audio capture
[self.rtcVideo startAudioCapture];
// Create the RTC room
self.rtcRoom = [self.rtcVideo createRTCRoom:self.roomId];
self.rtcRoom.delegate = self;

// Set user information
ByteRTCUserInfo *userInfo = [[ByteRTCUserInfo alloc] init];
userInfo.userId = self.userId;

// Join the room to start co-hosting
ByteRTCRoomConfig *config = [ByteRTCRoomConfig new];
config.isAutoPublish = YES;
config.isAutoSubscribeAudio = YES;
config.isAutoSubscribeVideo = YES;
// The token is requested from your business server.
[self.rtcRoom joinRoom:token userInfo:userInfo roomConfig:config];
// Callback: a remote user published a stream. Set up the remote user's view.
- (void)rtcRoom:(ByteRTCRoom *)rtcRoom onUserPublishStream:(NSString *)userId type:(ByteRTCMediaStreamType)type {
    if (type == ByteRTCMediaStreamTypeVideo || type == ByteRTCMediaStreamTypeBoth) {
        // Set the co-hosting user's view
        ByteRTCVideoCanvas *canvas = [[ByteRTCVideoCanvas alloc] init];
        canvas.renderMode = ByteRTCRenderModeHidden;
        canvas.view = view; // The UIView that renders the remote user's video
        canvas.view.backgroundColor = [UIColor clearColor];
        ByteRTCRemoteStreamKey *streamKey = [[ByteRTCRemoteStreamKey alloc] init];
        streamKey.userId = userId;
        streamKey.streamIndex = ByteRTCStreamIndexMain;
        streamKey.roomId = self.roomId;
        [self.rtcVideo setRemoteVideoCanvas:streamKey withCanvas:canvas];
    }
}

// Callback: a remote user stopped publishing. Remove the remote user's view.
- (void)manager:(VeLiveAnchorManager *)manager
    onUserUnPublishStream:(NSString *)uid
                     type:(ByteRTCMediaStreamType)streamType
                   reason:(ByteRTCStreamRemoveReason)reason {
    if (streamType == ByteRTCMediaStreamTypeVideo || streamType == ByteRTCMediaStreamTypeBoth) {
        // Remove the co-hosting user's view
        ByteRTCVideoCanvas *canvas = [[ByteRTCVideoCanvas alloc] init];
        canvas.renderMode = ByteRTCRenderModeHidden;
        canvas.view = nil;
        ByteRTCRemoteStreamKey *streamKey = [[ByteRTCRemoteStreamKey alloc] init];
        streamKey.userId = uid;
        streamKey.streamIndex = ByteRTCStreamIndexMain;
        streamKey.roomId = self.roomId;
        [self.rtcVideo setRemoteVideoCanvas:streamKey withCanvas:canvas];
    }
}
Beauty effects for co-hosting audience members are also integrated through the RTC engine. For usage instructions, see the 美颜特效(CV) documentation.
The audience member stops co-hosting through the RTC engine and resumes watching the live stream by pulling the stream.
Sequence Diagram
Sample Code
// Leave the RTC room
[self.rtcRoom leaveRoom];

// Remove the local view
ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
canvasView.view = nil; // Set to nil
canvasView.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setLocalVideoCanvas:ByteRTCStreamIndexMain withCanvas:canvasView];

// Remove the co-hosting user's view
ByteRTCVideoCanvas *canvas = [[ByteRTCVideoCanvas alloc] init];
canvas.renderMode = ByteRTCRenderModeHidden;
canvas.view = nil;
ByteRTCRemoteStreamKey *streamKey = [[ByteRTCRemoteStreamKey alloc] init];
streamKey.userId = uid; // The remote user's ID
streamKey.streamIndex = ByteRTCStreamIndexMain;
streamKey.roomId = self.roomId;
[self.rtcVideo setRemoteVideoCanvas:streamKey withCanvas:canvas];

// Stop video capture
[self.rtcVideo stopVideoCapture];
// Stop audio capture
[self.rtcVideo stopAudioCapture];
// Show the player view again
self.livePlayer.playerView.hidden = NO;
// Resume playback
[self.livePlayer play];
The audience member stops watching the live stream and destroys the player.
Sequence Diagram
Sample Code
// Stop playback
[self.livePlayer stop];
// Hide the player view
self.livePlayer.playerView.hidden = YES;
// Destroy the player
[self.livePlayer destroy];
self.livePlayer = nil;
// Destroy the RTC room
[self.rtcRoom destroy];
self.rtcRoom = nil;

// Destroy the RTC engine
[ByteRTCVideo destroyRTCVideo];
self.rtcVideo = nil;