This article collects typical usage examples of the C++ method JsepTrack::GetStreamId. If you are wondering what JsepTrack::GetStreamId does, how to call it, or what real-world uses look like, the hand-picked code samples below may help. You can also explore further usage examples of its enclosing class, JsepTrack.
Six code examples of the JsepTrack::GetStreamId method are shown below, sorted by popularity by default.
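For orientation before the examples: every snippet below treats GetStreamId() (and its sibling GetTrackId()) as a cheap, side-effect-free accessor identifying which MediaStream a JsepTrack belongs to, and then feeds that id into a lookup such as GetLocalStreamById() or GetRemoteStreamById(). The following is a minimal, hypothetical sketch of that accessor shape, assuming plain string ids as in the examples; the real declaration lives in Mozilla's JsepTrack.h and carries far more negotiation state.

#include <string>

// Trimmed-down, hypothetical sketch of the accessors exercised below; the
// real JsepTrack also exposes GetMediaType(), GetDirection(), GetSsrcs(),
// GetNegotiatedDetails(), and so on.
class JsepTrack {
public:
  JsepTrack(const std::string& streamId, const std::string& trackId)
    : mStreamId(streamId), mTrackId(trackId) {}

  // Id of the MediaStream this track belongs to; used as the key for
  // GetLocalStreamById() / GetRemoteStreamById() in the examples.
  const std::string& GetStreamId() const { return mStreamId; }

  // Id of the individual track within that stream.
  const std::string& GetTrackId() const { return mTrackId; }

private:
  std::string mStreamId;
  std::string mTrackId;
};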
Example 1: switch
nsresult
MediaPipelineFactory::ConfigureVideoCodecMode(const JsepTrack& aTrack,
                                              VideoSessionConduit& aConduit)
{
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
  RefPtr<LocalSourceStreamInfo> stream =
    mPCMedia->GetLocalStreamById(aTrack.GetStreamId());

  // get video track
  RefPtr<mozilla::dom::VideoStreamTrack> videotrack =
    stream->GetVideoTrackByTrackId(aTrack.GetTrackId());

  if (!videotrack) {
    MOZ_MTLOG(ML_ERROR, "video track not available");
    return NS_ERROR_FAILURE;
  }

  // get video source type
  RefPtr<DOMMediaStream> mediastream =
    mPCMedia->GetLocalStreamById(aTrack.GetStreamId())->GetMediaStream();
  DOMLocalMediaStream* domLocalStream = mediastream->AsDOMLocalMediaStream();
  if (!domLocalStream) {
    return NS_OK;
  }

  MediaEngineSource* engine =
    domLocalStream->GetMediaEngine(videotrack->GetTrackID());
  dom::MediaSourceEnum source = engine->GetMediaSource();

  webrtc::VideoCodecMode mode = webrtc::kRealtimeVideo;
  switch (source) {
    case dom::MediaSourceEnum::Browser:
    case dom::MediaSourceEnum::Screen:
    case dom::MediaSourceEnum::Application:
    case dom::MediaSourceEnum::Window:
      mode = webrtc::kScreensharing;
      break;
    case dom::MediaSourceEnum::Camera:
    default:
      mode = webrtc::kRealtimeVideo;
      break;
  }

  auto error = aConduit.ConfigureCodecMode(mode);
  if (error) {
    MOZ_MTLOG(ML_ERROR, "ConfigureCodecMode failed: " << error);
    return NS_ERROR_FAILURE;
  }
#endif
  return NS_OK;
}
Example 2: MediaPipelineTransmit
nsresult
MediaPipelineFactory::CreateMediaPipelineSending(
    const JsepTrackPair& aTrackPair,
    const JsepTrack& aTrack,
    size_t aLevel,
    RefPtr<TransportFlow> aRtpFlow,
    RefPtr<TransportFlow> aRtcpFlow,
    nsAutoPtr<MediaPipelineFilter> aFilter,
    const RefPtr<MediaSessionConduit>& aConduit)
{
  nsresult rv;

  // This is checked earlier
  RefPtr<LocalSourceStreamInfo> stream =
    mPCMedia->GetLocalStreamById(aTrack.GetStreamId());

  // Now we have all the pieces, create the pipeline
  RefPtr<MediaPipelineTransmit> pipeline = new MediaPipelineTransmit(
      mPC->GetHandle(),
      mPC->GetMainThread().get(),
      mPC->GetSTSThread(),
      stream->GetMediaStream(),
      aTrack.GetTrackId(),
      aLevel,
      aTrack.GetMediaType() == SdpMediaSection::kVideo,
      aConduit,
      aRtpFlow,
      aRtcpFlow,
      aFilter);

#if !defined(MOZILLA_EXTERNAL_LINKAGE)
  // implement checking for peerIdentity (where failure == black/silence)
  nsIDocument* doc = mPC->GetWindow()->GetExtantDoc();
  if (doc) {
    pipeline->UpdateSinkIdentity_m(doc->NodePrincipal(),
                                   mPC->GetPeerIdentity());
  } else {
    MOZ_MTLOG(ML_ERROR, "Cannot initialize pipeline without attached doc");
    return NS_ERROR_FAILURE; // Don't remove this till we know it's safe.
  }
#endif

  rv = pipeline->Init();
  if (NS_FAILED(rv)) {
    MOZ_MTLOG(ML_ERROR, "Couldn't initialize sending pipeline");
    return rv;
  }

  rv = stream->StorePipeline(aTrack.GetTrackId(),
                             RefPtr<MediaPipeline>(pipeline));
  if (NS_FAILED(rv)) {
    MOZ_MTLOG(ML_ERROR, "Couldn't store sending pipeline " <<
                        static_cast<unsigned>(rv));
    return rv;
  }

  return NS_OK;
}
Example 3: SanityCheckTracks
void SanityCheckTracks(const JsepTrack& a, const JsepTrack& b) const
{
  if (!a.GetNegotiatedDetails()) {
    ASSERT_FALSE(!!b.GetNegotiatedDetails());
    return;
  }

  ASSERT_TRUE(!!a.GetNegotiatedDetails());
  ASSERT_TRUE(!!b.GetNegotiatedDetails());
  ASSERT_EQ(a.GetMediaType(), b.GetMediaType());
  ASSERT_EQ(a.GetStreamId(), b.GetStreamId());
  ASSERT_EQ(a.GetTrackId(), b.GetTrackId());
  ASSERT_EQ(a.GetCNAME(), b.GetCNAME());
  ASSERT_NE(a.GetDirection(), b.GetDirection());
  ASSERT_EQ(a.GetSsrcs().size(), b.GetSsrcs().size());
  for (size_t i = 0; i < a.GetSsrcs().size(); ++i) {
    ASSERT_EQ(a.GetSsrcs()[i], b.GetSsrcs()[i]);
  }

  SanityCheckNegotiatedDetails(*a.GetNegotiatedDetails(),
                               *b.GetNegotiatedDetails());
}
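For context, Example 3 is a helper on a Google Test fixture: the ASSERT_* macros come from gtest, which is why the helper returns void (ASSERT_* may only appear in functions returning void). The following is a hypothetical, trimmed-down sketch of how such a helper is wired into a fixture; the real tests negotiate a full offer/answer before comparing the two sides' tracks, and the fixture and test names here are made up for illustration.

#include "gtest/gtest.h"

class JsepTrack;  // from Mozilla's JSEP implementation

class JsepTrackPairTest : public ::testing::Test {
protected:
  // Compares one side's view of a negotiated track with the other side's;
  // body as shown in Example 3 above.
  void SanityCheckTracks(const JsepTrack& a, const JsepTrack& b) const;
};

TEST_F(JsepTrackPairTest, NegotiatedTracksAgree) {
  // ... run offer/answer negotiation so both tracks have negotiated
  // details, then:
  // SanityCheckTracks(offererSendTrack, answererRecvTrack);
}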
Example 4: MediaPipelineReceiveAudio
nsresult
MediaPipelineFactory::CreateMediaPipelineReceiving(
    const JsepTrackPair& aTrackPair,
    const JsepTrack& aTrack,
    size_t aLevel,
    RefPtr<TransportFlow> aRtpFlow,
    RefPtr<TransportFlow> aRtcpFlow,
    nsAutoPtr<MediaPipelineFilter> aFilter,
    const RefPtr<MediaSessionConduit>& aConduit)
{
  // We will error out earlier if this isn't here.
  RefPtr<RemoteSourceStreamInfo> stream =
    mPCMedia->GetRemoteStreamById(aTrack.GetStreamId());

  RefPtr<MediaPipelineReceive> pipeline;

  TrackID numericTrackId = stream->GetNumericTrackId(aTrack.GetTrackId());
  MOZ_ASSERT(numericTrackId != TRACK_INVALID);

  bool queue_track = stream->ShouldQueueTracks();

  MOZ_MTLOG(ML_DEBUG, __FUNCTION__ << ": Creating pipeline for "
            << numericTrackId << " -> " << aTrack.GetTrackId());

  if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
    pipeline = new MediaPipelineReceiveAudio(
        mPC->GetHandle(),
        mPC->GetMainThread().get(),
        mPC->GetSTSThread(),
        stream->GetMediaStream()->GetInputStream(),
        aTrack.GetTrackId(),
        numericTrackId,
        aLevel,
        static_cast<AudioSessionConduit*>(aConduit.get()), // Ugly downcast.
        aRtpFlow,
        aRtcpFlow,
        aFilter,
        queue_track);
  } else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
    pipeline = new MediaPipelineReceiveVideo(
        mPC->GetHandle(),
        mPC->GetMainThread().get(),
        mPC->GetSTSThread(),
        stream->GetMediaStream()->GetInputStream(),
        aTrack.GetTrackId(),
        numericTrackId,
        aLevel,
        static_cast<VideoSessionConduit*>(aConduit.get()), // Ugly downcast.
        aRtpFlow,
        aRtcpFlow,
        aFilter,
        queue_track);
  } else {
    MOZ_ASSERT(false);
    MOZ_MTLOG(ML_ERROR, "Invalid media type in CreateMediaPipelineReceiving");
    return NS_ERROR_FAILURE;
  }

  nsresult rv = pipeline->Init();
  if (NS_FAILED(rv)) {
    MOZ_MTLOG(ML_ERROR, "Couldn't initialize receiving pipeline");
    return rv;
  }

  rv = stream->StorePipeline(aTrack.GetTrackId(),
                             RefPtr<MediaPipeline>(pipeline));
  if (NS_FAILED(rv)) {
    MOZ_MTLOG(ML_ERROR, "Couldn't store receiving pipeline " <<
                        static_cast<unsigned>(rv));
    return rv;
  }

  stream->SyncPipeline(pipeline);

  return NS_OK;
}
Example 5: setter
nsresult
MediaPipelineFactory::CreateOrUpdateMediaPipeline(
    const JsepTrackPair& aTrackPair,
    const JsepTrack& aTrack)
{
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
  // The GMP code is all the way on the other side of webrtc.org, and it is not
  // feasible to plumb this information all the way through. So, we set it (for
  // the duration of this call) in a global variable. This allows the GMP code
  // to report errors to the PC.
  WebrtcGmpPCHandleSetter setter(mPC->GetHandle());
#endif

  MOZ_ASSERT(aTrackPair.mRtpTransport);

  bool receiving = aTrack.GetDirection() == sdp::kRecv;

  size_t level;
  RefPtr<TransportFlow> rtpFlow;
  RefPtr<TransportFlow> rtcpFlow;
  nsAutoPtr<MediaPipelineFilter> filter;

  nsresult rv = GetTransportParameters(aTrackPair,
                                       aTrack,
                                       &level,
                                       &rtpFlow,
                                       &rtcpFlow,
                                       &filter);
  if (NS_FAILED(rv)) {
    MOZ_MTLOG(ML_ERROR, "Failed to get transport parameters for pipeline, rv="
              << static_cast<unsigned>(rv));
    return rv;
  }

  if (aTrack.GetMediaType() == SdpMediaSection::kApplication) {
    // GetTransportParameters has already done everything we need for
    // datachannel.
    return NS_OK;
  }

  // Find the stream we need
  SourceStreamInfo* stream;
  if (receiving) {
    stream = mPCMedia->GetRemoteStreamById(aTrack.GetStreamId());
  } else {
    stream = mPCMedia->GetLocalStreamById(aTrack.GetStreamId());
  }

  if (!stream) {
    MOZ_MTLOG(ML_ERROR, "Negotiated " << (receiving ? "recv" : "send")
              << " stream id " << aTrack.GetStreamId() << " was never added");
    MOZ_ASSERT(false);
    return NS_ERROR_FAILURE;
  }

  if (!stream->HasTrack(aTrack.GetTrackId())) {
    MOZ_MTLOG(ML_ERROR, "Negotiated " << (receiving ? "recv" : "send")
              << " track id " << aTrack.GetTrackId() << " was never added");
    MOZ_ASSERT(false);
    return NS_ERROR_FAILURE;
  }

  RefPtr<MediaSessionConduit> conduit;
  if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
    rv = GetOrCreateAudioConduit(aTrackPair, aTrack, &conduit);
    if (NS_FAILED(rv))
      return rv;
  } else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
    rv = GetOrCreateVideoConduit(aTrackPair, aTrack, &conduit);
    if (NS_FAILED(rv))
      return rv;
  } else {
    // We've created the TransportFlow, nothing else to do here.
    return NS_OK;
  }

  RefPtr<MediaPipeline> pipeline =
    stream->GetPipelineByTrackId_m(aTrack.GetTrackId());

  if (pipeline && pipeline->level() != static_cast<int>(level)) {
    MOZ_MTLOG(ML_WARNING, "Track " << aTrack.GetTrackId() <<
              " has moved from level " << pipeline->level() <<
              " to level " << level <<
              ". This requires re-creating the MediaPipeline.");
    // Since we do not support changing the conduit on a pre-existing
    // MediaPipeline
    pipeline = nullptr;
    stream->RemoveTrack(aTrack.GetTrackId());
    stream->AddTrack(aTrack.GetTrackId());
  }

  if (pipeline) {
    pipeline->UpdateTransport_m(level, rtpFlow, rtcpFlow, filter);
    return NS_OK;
  }

  MOZ_MTLOG(ML_DEBUG,
            "Creating media pipeline"
            << " m-line index=" << aTrackPair.mLevel
            << " type=" << aTrack.GetMediaType()
//......... part of the code is omitted here .........
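The block comment at the top of Example 5 describes a small RAII pattern: because the peer-connection handle cannot realistically be plumbed through webrtc.org into the GMP code, it is published in a global for the duration of the call and the previous value is restored afterwards. Below is a hypothetical, simplified sketch of such a scoped setter; the real WebrtcGmpPCHandleSetter lives in Mozilla's GMP glue and is tied to its threading model, so treat every name here as illustrative only.

#include <string>

// Hypothetical scoped setter (not Mozilla's WebrtcGmpPCHandleSetter):
// publishes a handle in a global for the lifetime of the object and restores
// the previous value on destruction. The pattern is only safe when the global
// is touched from a single thread, as the comment in Example 5 implies.
class ScopedPCHandleSetter {
public:
  explicit ScopedPCHandleSetter(const std::string& handle)
    : mPrevious(sCurrentHandle) {
    sCurrentHandle = handle;
  }
  ~ScopedPCHandleSetter() { sCurrentHandle = mPrevious; }

  // What otherwise-unreachable code (here, GMP error reporting) reads back.
  static const std::string& Current() { return sCurrentHandle; }

private:
  std::string mPrevious;
  static std::string sCurrentHandle;
};

std::string ScopedPCHandleSetter::sCurrentHandle;

Example 5 constructs its setter on the stack at the top of CreateOrUpdateMediaPipeline, so the handle stays visible for exactly the duration of that call and is cleaned up automatically on every return path.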
Example 6: GetTransportParameters
nsresult
MediaPipelineFactory::CreateOrUpdateMediaPipeline(
    const JsepTrackPair& aTrackPair,
    const JsepTrack& aTrack)
{
  MOZ_ASSERT(aTrackPair.mRtpTransport);

  bool receiving =
    aTrack.GetDirection() == JsepTrack::Direction::kJsepTrackReceiving;

  size_t level;
  RefPtr<TransportFlow> rtpFlow;
  RefPtr<TransportFlow> rtcpFlow;
  nsAutoPtr<MediaPipelineFilter> filter;

  nsresult rv = GetTransportParameters(aTrackPair,
                                       aTrack,
                                       &level,
                                       &rtpFlow,
                                       &rtcpFlow,
                                       &filter);
  if (NS_FAILED(rv)) {
    MOZ_MTLOG(ML_ERROR, "Failed to get transport parameters for pipeline, rv="
              << static_cast<unsigned>(rv));
    return rv;
  }

  if (aTrack.GetMediaType() == SdpMediaSection::kApplication) {
    // GetTransportParameters has already done everything we need for
    // datachannel.
    return NS_OK;
  }

  // Find the stream we need
  SourceStreamInfo* stream;
  if (receiving) {
    stream = mPCMedia->GetRemoteStreamById(aTrack.GetStreamId());
  } else {
    stream = mPCMedia->GetLocalStreamById(aTrack.GetStreamId());
  }

  if (!stream) {
    MOZ_MTLOG(ML_ERROR, "Negotiated " << (receiving ? "recv" : "send")
              << " stream id " << aTrack.GetStreamId() << " was never added");
    MOZ_ASSERT(false);
    return NS_ERROR_FAILURE;
  }

  if (!stream->HasTrack(aTrack.GetTrackId())) {
    MOZ_MTLOG(ML_ERROR, "Negotiated " << (receiving ? "recv" : "send")
              << " track id " << aTrack.GetTrackId() << " was never added");
    MOZ_ASSERT(false);
    return NS_ERROR_FAILURE;
  }

  RefPtr<MediaSessionConduit> conduit;
  if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
    rv = GetOrCreateAudioConduit(aTrackPair, aTrack, &conduit);
    if (NS_FAILED(rv))
      return rv;
  } else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
    rv = GetOrCreateVideoConduit(aTrackPair, aTrack, &conduit);
    if (NS_FAILED(rv))
      return rv;
  } else {
    // We've created the TransportFlow, nothing else to do here.
    return NS_OK;
  }

  RefPtr<MediaPipeline> pipeline =
    stream->GetPipelineByTrackId_m(aTrack.GetTrackId());

  if (pipeline && pipeline->level() != static_cast<int>(level)) {
    MOZ_MTLOG(ML_WARNING, "Track " << aTrack.GetTrackId() <<
              " has moved from level " << pipeline->level() <<
              " to level " << level <<
              ". This requires re-creating the MediaPipeline.");
    // Since we do not support changing the conduit on a pre-existing
    // MediaPipeline
    pipeline = nullptr;
    stream->RemoveTrack(aTrack.GetTrackId());
    stream->AddTrack(aTrack.GetTrackId());
  }

  if (pipeline) {
    pipeline->UpdateTransport_m(level, rtpFlow, rtcpFlow, filter);
    return NS_OK;
  }

  MOZ_MTLOG(ML_DEBUG,
            "Creating media pipeline"
            << " m-line index=" << aTrackPair.mLevel
            << " type=" << aTrack.GetMediaType()
            << " direction=" << aTrack.GetDirection());

  if (receiving) {
    rv = CreateMediaPipelineReceiving(aTrackPair, aTrack,
                                      level, rtpFlow, rtcpFlow, filter,
                                      conduit);
    if (NS_FAILED(rv))
//......... part of the code is omitted here .........