本文整理汇总了C++中nsAutoPtr::begin方法的典型用法代码示例。如果您正苦于以下问题:C++ nsAutoPtr::begin方法的具体用法?C++ nsAutoPtr::begin怎么用?C++ nsAutoPtr::begin使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类nsAutoPtr
的用法示例。
在下文中一共展示了nsAutoPtr::begin方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: OnStatsReport_m
// Main-thread delivery of aggregated WebRTC statistics: merges the reports
// produced for every live PeerConnection with stats retained from already
// closed ones, then hands the combined report to the JS observer callback.
static void OnStatsReport_m(
  nsMainThreadPtrHandle<WebrtcGlobalStatisticsCallback> aStatsCallback,
  nsAutoPtr<RTCStatsQueries> aQueryList)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(aQueryList);

  WebrtcGlobalStatisticsReport report;
  report.mReports.Construct();

  // Reports for the currently active PeerConnections.
  for (auto& query : *aQueryList) {
    MOZ_ASSERT(query);
    report.mReports.Value().AppendElement(*query->report);
  }

  // Reports kept around for closed/destroyed PeerConnections.
  PeerConnectionCtx* pcCtx = GetPeerConnectionCtx();
  if (pcCtx) {
    report.mReports.Value().AppendElements(pcCtx->mStatsForClosedPeerConnections);
  }

  ErrorResult rv;
  aStatsCallback.get()->Call(report, rv);
  if (rv.Failed()) {
    CSFLogError(logTag, "Error firing stats observer callback");
  }
}
示例2: GetAllStats_s
// STS-thread half of the global stats request: runs every queued stats
// query in place, then bounces the filled-in query list (and the observer
// callback handle) to the main thread for delivery via OnStatsReport_m.
static void GetAllStats_s(
  nsMainThreadPtrHandle<WebrtcGlobalStatisticsCallback> aStatsCallback,
  nsAutoPtr<RTCStatsQueries> aQueryList)
{
  MOZ_ASSERT(aQueryList);

  for (auto& query : *aQueryList) {
    MOZ_ASSERT(query);
    PeerConnectionImpl::ExecuteStatsQuery_s(query);
  }

  NS_DispatchToMainThread(WrapRunnableNM(&OnStatsReport_m,
                                         aStatsCallback,
                                         aQueryList),
                          NS_DISPATCH_NORMAL);
}
示例3: OnGetLogging_m
// Main-thread delivery of collected log lines to the logging observer.
// Converts each UTF-8 log entry to UTF-16 and invokes the callback; when
// the list is empty, no callback is made at all.
// Fix: the original checked rv.Failed() unconditionally, inspecting an
// ErrorResult that was never set when aLogList was empty. The check now
// only follows an actual Call() (behavior is unchanged, since an unset
// ErrorResult reports no failure, but the result is no longer read on a
// path where nothing produced it).
static void OnGetLogging_m(
  nsMainThreadPtrHandle<WebrtcGlobalLoggingCallback> aLoggingCallback,
  const std::string& aPattern,
  nsAutoPtr<std::deque<std::string>> aLogList)
{
  if (aLogList->empty()) {
    return; // nothing captured; don't invoke the observer
  }

  Sequence<nsString> nsLogs;
  for (const auto& line : *aLogList) {
    nsLogs.AppendElement(NS_ConvertUTF8toUTF16(line.c_str()));
  }

  ErrorResult rv;
  aLoggingCallback.get()->Call(nsLogs, rv);
  if (rv.Failed()) {
    CSFLogError(logTag, "Error firing logging observer callback");
  }
}
示例4: FindId
// Telemetry sampler, run on the STS thread (nominally once per second).
// Executes every pending stats query, accumulates quality histograms
// (packet loss, jitter, RTT, per-stream bandwidth) from the inbound RTP
// stream stats, then steals the fresh reports into ctx->mLastReports so
// the NEXT invocation can compute one-second deltas against them.
static void
EverySecondTelemetryCallback_s(nsAutoPtr<RTCStatsQueries> aQueryList) {
using namespace Telemetry;
// Bail out cheaply if there are no active PeerConnections to sample.
if(!PeerConnectionCtx::isActive()) {
return;
}
PeerConnectionCtx *ctx = PeerConnectionCtx::GetInstance();
for (auto q = aQueryList->begin(); q != aQueryList->end(); ++q) {
PeerConnectionImpl::ExecuteStatsQuery_s(*q);
auto& r = *(*q)->report;
if (r.mInboundRTPStreamStats.WasPassed()) {
// First, get reports from a second ago, if any, for calculations below
const Sequence<RTCInboundRTPStreamStats> *lastInboundStats = nullptr;
{
auto i = FindId(ctx->mLastReports, r.mPcid);
if (i != ctx->mLastReports.NoIndex) {
lastInboundStats = &ctx->mLastReports[i]->mInboundRTPStreamStats.Value();
}
}
// Then, look for the things we want telemetry on
auto& array = r.mInboundRTPStreamStats.Value();
for (decltype(array.Length()) i = 0; i < array.Length(); i++) {
auto& s = array[i];
// Heuristic: stream ids containing "audio" are audio tracks, everything
// else is treated as video -- assumes the id naming convention holds.
bool isAudio = (s.mId.Value().Find("audio") != -1);
// mIsRemote selects the OUTBOUND histograms: remote-generated stats
// describe the stream we sent, locally-measured ones the stream we got.
if (s.mPacketsLost.WasPassed()) {
Accumulate(s.mIsRemote?
(isAudio? WEBRTC_AUDIO_QUALITY_OUTBOUND_PACKETLOSS :
WEBRTC_VIDEO_QUALITY_OUTBOUND_PACKETLOSS) :
(isAudio? WEBRTC_AUDIO_QUALITY_INBOUND_PACKETLOSS :
WEBRTC_VIDEO_QUALITY_INBOUND_PACKETLOSS),
s.mPacketsLost.Value());
}
if (s.mJitter.WasPassed()) {
Accumulate(s.mIsRemote?
(isAudio? WEBRTC_AUDIO_QUALITY_OUTBOUND_JITTER :
WEBRTC_VIDEO_QUALITY_OUTBOUND_JITTER) :
(isAudio? WEBRTC_AUDIO_QUALITY_INBOUND_JITTER :
WEBRTC_VIDEO_QUALITY_INBOUND_JITTER),
s.mJitter.Value());
}
if (s.mMozRtt.WasPassed()) {
// RTT is expected only on remote (outbound-leg) stats.
MOZ_ASSERT(s.mIsRemote);
Accumulate(isAudio? WEBRTC_AUDIO_QUALITY_OUTBOUND_RTT :
WEBRTC_VIDEO_QUALITY_OUTBOUND_RTT,
s.mMozRtt.Value());
}
// Bandwidth needs a delta: pair this stream with its entry in last
// second's report (matched by id) and divide the byte delta by the
// timestamp delta.
if (lastInboundStats && s.mBytesReceived.WasPassed()) {
auto& laststats = *lastInboundStats;
auto i = FindId(laststats, s.mId.Value());
if (i != laststats.NoIndex) {
auto& lasts = laststats[i];
if (lasts.mBytesReceived.WasPassed()) {
auto delta_ms = int32_t(s.mTimestamp.Value() -
lasts.mTimestamp.Value());
// Only accumulate for a sane sample spacing: positive and under a
// minute (guards against clock jumps and stale reports).
if (delta_ms > 0 && delta_ms < 60000) {
Accumulate(s.mIsRemote?
(isAudio? WEBRTC_AUDIO_QUALITY_OUTBOUND_BANDWIDTH_KBITS :
WEBRTC_VIDEO_QUALITY_OUTBOUND_BANDWIDTH_KBITS) :
(isAudio? WEBRTC_AUDIO_QUALITY_INBOUND_BANDWIDTH_KBITS :
WEBRTC_VIDEO_QUALITY_INBOUND_BANDWIDTH_KBITS),
((s.mBytesReceived.Value() -
lasts.mBytesReceived.Value()) * 8) / delta_ms);
}
}
}
}
}
}
}
// Steal and hang on to reports for the next second
ctx->mLastReports.Clear();
for (auto q = aQueryList->begin(); q != aQueryList->end(); ++q) {
ctx->mLastReports.AppendElement((*q)->report.forget()); // steal avoids copy
}
// Container must be freed back on main thread
NS_DispatchToMainThread(WrapRunnableNM(&FreeOnMain_m, aQueryList),
NS_DISPATCH_NORMAL);
}
示例5: FindId
static void
EverySecondTelemetryCallback_s(nsAutoPtr<RTCStatsQueries> aQueryList) {
using namespace Telemetry;
if(!PeerConnectionCtx::isActive()) {
return;
}
PeerConnectionCtx *ctx = PeerConnectionCtx::GetInstance();
for (auto q = aQueryList->begin(); q != aQueryList->end(); ++q) {
PeerConnectionImpl::ExecuteStatsQuery_s(*q);
auto& r = *(*q)->report;
bool isHello = (*q)->isHello;
if (r.mInboundRTPStreamStats.WasPassed()) {
// First, get reports from a second ago, if any, for calculations below
const Sequence<RTCInboundRTPStreamStats> *lastInboundStats = nullptr;
{
auto i = FindId(ctx->mLastReports, r.mPcid);
if (i != ctx->mLastReports.NoIndex) {
lastInboundStats = &ctx->mLastReports[i]->mInboundRTPStreamStats.Value();
}
}
// Then, look for the things we want telemetry on
auto& array = r.mInboundRTPStreamStats.Value();
for (decltype(array.Length()) i = 0; i < array.Length(); i++) {
auto& s = array[i];
bool isAudio = (s.mId.Value().Find("audio") != -1);
if (s.mPacketsLost.WasPassed() && s.mPacketsReceived.WasPassed() &&
(s.mPacketsLost.Value() + s.mPacketsReceived.Value()) != 0) {
ID id;
if (s.mIsRemote) {
id = isAudio ? WEBRTC_AUDIO_QUALITY_OUTBOUND_PACKETLOSS_RATE :
WEBRTC_VIDEO_QUALITY_OUTBOUND_PACKETLOSS_RATE;
} else {
id = isAudio ? WEBRTC_AUDIO_QUALITY_INBOUND_PACKETLOSS_RATE :
WEBRTC_VIDEO_QUALITY_INBOUND_PACKETLOSS_RATE;
}
// *1000 so we can read in 10's of a percent (permille)
Accumulate(id,
(s.mPacketsLost.Value() * 1000) /
(s.mPacketsLost.Value() + s.mPacketsReceived.Value()));
}
if (s.mJitter.WasPassed()) {
ID id;
if (s.mIsRemote) {
id = isAudio ? WEBRTC_AUDIO_QUALITY_OUTBOUND_JITTER :
WEBRTC_VIDEO_QUALITY_OUTBOUND_JITTER;
} else {
id = isAudio ? WEBRTC_AUDIO_QUALITY_INBOUND_JITTER :
WEBRTC_VIDEO_QUALITY_INBOUND_JITTER;
}
Accumulate(id, s.mJitter.Value());
}
if (s.mMozRtt.WasPassed()) {
MOZ_ASSERT(s.mIsRemote);
ID id;
if (isAudio) {
id = isHello ? LOOP_AUDIO_QUALITY_OUTBOUND_RTT :
WEBRTC_AUDIO_QUALITY_OUTBOUND_RTT;
} else {
id = isHello ? LOOP_VIDEO_QUALITY_OUTBOUND_RTT :
WEBRTC_VIDEO_QUALITY_OUTBOUND_RTT;
}
Accumulate(id, s.mMozRtt.Value());
}
if (lastInboundStats && s.mBytesReceived.WasPassed()) {
auto& laststats = *lastInboundStats;
auto i = FindId(laststats, s.mId.Value());
if (i != laststats.NoIndex) {
auto& lasts = laststats[i];
if (lasts.mBytesReceived.WasPassed()) {
auto delta_ms = int32_t(s.mTimestamp.Value() -
lasts.mTimestamp.Value());
// In theory we're called every second, so delta *should* be in that range.
// Small deltas could cause errors due to division
if (delta_ms > 500 && delta_ms < 60000) {
ID id;
if (s.mIsRemote) {
if (isAudio) {
id = isHello ? LOOP_AUDIO_QUALITY_OUTBOUND_BANDWIDTH_KBITS :
WEBRTC_AUDIO_QUALITY_OUTBOUND_BANDWIDTH_KBITS;
} else {
id = isHello ? LOOP_VIDEO_QUALITY_OUTBOUND_BANDWIDTH_KBITS :
WEBRTC_VIDEO_QUALITY_OUTBOUND_BANDWIDTH_KBITS;
}
} else {
if (isAudio) {
id = isHello ? LOOP_AUDIO_QUALITY_INBOUND_BANDWIDTH_KBITS :
WEBRTC_AUDIO_QUALITY_INBOUND_BANDWIDTH_KBITS;
} else {
id = isHello ? LOOP_VIDEO_QUALITY_INBOUND_BANDWIDTH_KBITS :
WEBRTC_VIDEO_QUALITY_INBOUND_BANDWIDTH_KBITS;
}
}
Accumulate(id, ((s.mBytesReceived.Value() -
lasts.mBytesReceived.Value()) * 8) / delta_ms);
}
// We could accumulate values until enough time has passed
// and then Accumulate() but this isn't that important.
}
//.........这里部分代码省略.........