这篇文章主要介绍ios webrtcdemo的实现及相关注意事项。 前面很多人问webrtc android下有webrtcdemo, ios上怎么找不到,放在哪里呢?
答案:webrtcdemo在ios上没有实现,如果要实现也很简单,既然安卓都有了,依葫芦画瓢即可移植到ios上,不过可能要求您熟悉android语法,这里给出ios上的参考代码:
1,音频编码:
webrtc支持很多种音频编码,ilbc,isac,G711,G722,opus等等.默认isac. 实际使用中发现不同手机噪声,回音效果大不一样,这个好像跟音频编码和AEC有很大关系,不过整体而言ios音质效果好多了(毕竟价格摆在那里,另外跟ios的AEC直接集成在了硬件上也有很大关系),小米效果很一般(不是我黑小米,是就事论事,公司就有小米2S).
2,视频编码:
webrtc默认使用vp8编码,不过有人已经将H264加到webrtc里面了,因为H264应用时间比较长,用得也比较广泛,有些项目必须兼容以前的东西,至于两种编码的优劣我就不比较了,网上搜索资料一堆.
3,NACK,FEC:
这个也是webrtc的核心,网络状况不好的情况下通过NACK和FEC来解决丢包的问题,有兴趣的可以看代码了解下里面那个KeyFrame的处理方式.花屏跟这个也有很大关系.
4,AudioChannel,VideoChannel:
看代码可以知道,这两个属性相当于是各个模块关联起来的纽带,如transport,encoder,network,rtpRtcp.
以上,如有错误和疑问请纠正或补充,谢谢!
下面给出ios上的参考代码:
// Creates the WebRTC voice (VoE) and video (ViE) engines and acquires every
// sub-API interface this class uses, then initializes both engines.
// Returns TRUE on success; FALSE as soon as a mandatory interface cannot be
// obtained or an engine fails to initialize.
// Repost credit: RTC_Blacker http://www.cnblogs.com/lingyunhu
-(BOOL)initWebrtcObjects
{
    if ((voE = webrtc::VoiceEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeBase = webrtc::VoEBase::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeCodec = webrtc::VoECodec::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // VoEFile is only used for the (commented-out) microphone-recording
    // feature, so a failure here is logged but deliberately non-fatal.
    if ((voeFile = webrtc::VoEFile::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if ((voeHardware = webrtc::VoEHardware::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeNetwork = webrtc::VoENetwork::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeAudioProccessing = webrtc::VoEAudioProcessing::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeRtpRtcp = webrtc::VoERTP_RTCP::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // BUG FIX: the original fell through on a failed VoEBase::Init(), leaving
    // an unusable voice engine; fail fast, consistent with ViEBase::Init() below.
    if (voeBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((viE = webrtc::VideoEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieBase = webrtc::ViEBase::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCapture = webrtc::ViECapture::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRender = webrtc::ViERender::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCodec = webrtc::ViECodec::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieNetwork = webrtc::ViENetwork::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRtpRtcp = webrtc::ViERTP_RTCP::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    [self initAudioCodec];
    [self initVideoCodec];
    // Capture id and video channel are assigned for real in -start;
    // -1 marks the video channel as "not created yet".
    captureID = 0;
    videoChannel = -1;
    return TRUE;
}
// Selects ISAC as the audio send codec by scanning the codec list exposed by
// VoECodec. If ISAC is not found (or voeCodec was never acquired),
// voeCodecInst stays zero-filled and the later SetSendCodec call in -start
// will fail and be logged there.
-(void)initAudioCodec
{
    memset(&voeCodecInst, 0, sizeof(webrtc::CodecInst));
    if (voeCodec != NULL) {
        for (int index = 0; index < voeCodec->NumOfCodecs(); index++) {
            webrtc::CodecInst ci;
            // BUG FIX: check GetCodec's return value; the original compared
            // against an uninitialized CodecInst when the lookup failed.
            if (voeCodec->GetCodec(index, ci) != 0) {
                continue;
            }
            if (strncmp(ci.plname, "ISAC", 4) == 0) {
                memcpy(&voeCodecInst, &ci, sizeof(webrtc::CodecInst));
                break;
            }
        }
        //voeCodecInst.channels = 1;
        //voeCodecInst.rate = -1;
    }
}
// Creates the audio/video channels, configures audio processing, codecs and
// RTP/RTCP, wires up capture + local/remote renderers, and starts sending and
// receiving on both channels. Returns TRUE when the full pipeline is running,
// FALSE on the first fatal error (each failure is logged with its line).
-(BOOL)start
{
    // BUG FIX: original read "f (" (syntax error), and compared the result of
    // CreateChannel() against 0. VoEBase::CreateChannel() returns the new
    // channel id (>= 0) on success and -1 on failure, so "!= 0" rejected every
    // valid channel except channel 0.
    if ((audioChannel = voeBase->CreateChannel()) == -1) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // ViEBase::CreateChannel writes the new channel id into videoChannel and
    // returns 0 on success.
    if (vieBase->CreateChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %d %s at line %d", vieBase->LastError(), __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo: CreateChannel success! %d, %d", videoChannel, audioChannel);
    //vieCodec->SetReceiveCodec(videoChannel,videoCodec);

    // Audio processing: mobile-mode echo control, automatic gain control and
    // high noise suppression. Only the AECM failure is treated as fatal here,
    // matching the original behavior.
    if (voeAudioProccessing->SetAecmMode() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    voeAudioProccessing->SetAgcStatus(TRUE, webrtc::kAgcDefault);
    voeAudioProccessing->SetNsStatus(TRUE, webrtc::kNsHighSuppression);

    // -1 selects the default capture device.
    _voice_capture_device_index = -1;
    voeHardware->SetRecordingDevice(_voice_capture_device_index);
    voeHardware->SetPlayoutDevice(_voice_playback_device_index);
    if (voeHardware->SetLoudspeakerStatus(true) != 0) {
        // Non-fatal: fall back to the receiver speaker.
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    voeCodec->SetSendCodec(audioChannel, voeCodecInst);

    // Audio transport: bind the local RTP receiver and point the sender at the
    // remote endpoint.
    RtpRtcpStreamStruct streamStruct = [self createRtpStreamStruct];
    voeChannelTransport = new webrtc::test::VoiceChannelTransport(voeNetwork, audioChannel);
    voeChannelTransport->SetLocalReceiver2(localARtpPort.rtp, streamStruct);
    voeChannelTransport->SetSendDestination2([remoteIPAddress UTF8String], remoteARtpPort.rtp, remoteARtpPort.rtcp);

    if (vieCodec->SetSendCodec(videoChannel, videoCodec) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // Loss handling: NACK retransmissions, reduced-size RTCP and PLI-based
    // key-frame requests (see the NACK/FEC notes in the article).
    vieRtpRtcp->SetNACKStatus(videoChannel, TRUE);
    vieRtpRtcp->SetRTCPStatus(videoChannel, webrtc::kRtcpNonCompound_RFC5506);
    vieRtpRtcp->SetKeyFrameRequestMethod(videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);

    // Tie audio and video together for lip sync; ConnectAudioChannel returns
    // non-zero on failure.
    vieBase->SetVoiceEngine(voE);
    if (vieBase->ConnectAudioChannel(videoChannel, audioChannel)) {
        DebugLog(@"AVErr:%s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Pick a capture device if the caller did not pin one. When more than one
    // device exists, index 1 is chosen (front/back camera selection).
    if (deviceUniqueID == nil) {
        DebugLog(@"AVInfo NumberOfCaptureDevices is %d", vieCapture->NumberOfCaptureDevices());
        int deviceCount = vieCapture->NumberOfCaptureDevices();
        if (deviceCount > 0) {
            int deviceIndex = 0;
            if (deviceCount > 1) {
                deviceIndex = 1;//[[AVShareData instance] isUseFrontCamera]?0:1;
            }
            char device_name[KMaxDeviceNameLength];
            char unique_id[KMaxUniqueIdLength];
            memset(unique_id, 0, KMaxUniqueIdLength);
            vieCapture->GetCaptureDevice(deviceIndex, device_name, KMaxDeviceNameLength, unique_id, KMaxUniqueIdLength);
            deviceUniqueID = [NSString stringWithFormat:@"%s", unique_id];
        }
    }
    DebugLog(@"AVInfo deviceUniqueID is %@", deviceUniqueID);
    // BUG FIX: AllocateCaptureDevice expects the UTF-8 byte length of the id;
    // NSString.length is the UTF-16 code-unit count, which differs for
    // non-ASCII ids.
    unsigned int uniqueIDLength =
        (unsigned int)[deviceUniqueID lengthOfBytesUsingEncoding:NSUTF8StringEncoding];
    if ((vieCapture->AllocateCaptureDevice([deviceUniqueID UTF8String], uniqueIDLength, captureID)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo captureID is %d", captureID);
    if (vieCapture->ConnectCaptureDevice(captureID, videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // CIF capture feeding the VP8 encoder.
    webrtc::CaptureCapability captureCapability;
    captureCapability.width = 352;
    captureCapability.height = 288;
    captureCapability.codecType = webrtc::kVideoCodecVP8;
    captureCapability.maxFPS = DEFAULT_VIDEO_CODEC_MAX_FRAMERATE;
    if (vieCapture->StartCapture(captureID, captureCapability) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Local preview renders the capture stream; remote video renders the
    // receive channel. z-order 0 = local below, 1 = remote on top.
    if ((vieRender->AddRenderer(captureID, [self localRenderView], 0, 0.0, 0.0, 1.0, 1.0)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StartRender(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->AddRenderer(videoChannel, [self remoteRenderView], 1, 0.0f, 0.0f, 1.0f, 1.0f) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StartRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Finally start media flowing: video receive/send, then audio
    // receive/playout/send.
    if (vieBase->StartReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StartSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    //webrtc::CodecInst ci;
    //voeFile->StartRecordingMicrophone(@"a.avi",ci,1000);
    DebugLog(@"AVInfo: %s at line %d success!", __FUNCTION__, __LINE__);
    return TRUE;
}
// Tears down the call in roughly the reverse order of -start: stop audio,
// stop video send/receive, stop and release capture, stop and remove the
// renderers, then delete both channels. Returns TRUE on a clean shutdown,
// FALSE on the first failing mandatory step.
-(BOOL)stop
{
    if (voeBase->StopSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StopSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StopReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // BUG FIX: -start also added a local-preview renderer on captureID and
    // connected the capture device to the channel; neither was ever torn down,
    // leaking the renderer across calls. These are logged but non-fatal so the
    // rest of the shutdown still runs.
    if (vieRender->StopRender(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if (vieRender->RemoveRenderer(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if (vieCapture->DisconnectCaptureDevice(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if (vieCapture->StopCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieCapture->ReleaseCaptureDevice(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StopRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->RemoveRenderer(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->DeleteChannel(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->DeleteChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo: %s at line %d success", __FUNCTION__, __LINE__);
    return TRUE;
}
相关说明:1,音频编码:
webrtc支持很多种音频编码,ilbc,isac,G711,G722,opus等等.默认isac. 实际使用中发现不同手机噪声,回音效果大不一样,这个好像跟音频编码和AEC有很大关系,不过整体而言ios音质效果好多了(毕竟价格摆在那里,另外跟ios的AEC直接集成在了硬件上也有很大关系),小米效果很一般(不是我黑小米,是就事论事,公司就有小米2S).
2,视频编码:
webrtc默认使用vp8编码,不过有人已经将H264加到webrtc里面了,因为H264应用时间比较长,用得也比较广泛,有些项目必须兼容以前的东西,至于两种编码的优劣我就不比较了,网上搜索资料一堆.
3,NACK,FEC:
这个也是webrtc的核心,网络状况不好的情况下通过NACK和FEC来解决丢包的问题,有兴趣的可以看代码了解下里面那个KeyFrame的处理方式.花屏跟这个也有很大关系.
4,AudioChannel,VideoChannel:
看代码可以知道,这两个属性相当于是各个模块关联起来的纽带,如transport,encoder,network,rtpRtcp.
以上,如有错误和疑问请纠正或补充,谢谢!
收藏的用户(0) X
正在加载信息~
推荐阅读
最新回复 (0)
站点信息
- 文章2313
- 用户1336
- 访客11757999
每日一句
Life is short; Live it!
人生苦短,活出精彩。
人生苦短,活出精彩。
新会员