This article collects typical usage examples of the C++ wrapUnique function. If you are wondering what wrapUnique does, how to call it, or what real-world uses look like, the curated code examples below may help.
The article presents 15 code examples of the wrapUnique function, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
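For readers who have not seen it before: in the Blink/Chromium code these examples are drawn from, wrapUnique is a small WTF helper (declared in, e.g., wtf/PtrUtil.h) that adopts a raw owning pointer into a std::unique_ptr, covering the common case of factory functions that return raw pointers. The following is a minimal, self-contained sketch of that pattern, not the upstream Blink source; the Widget struct, createWidget() factory, and main() driver are invented purely for illustration.

#include <memory>

// Illustrative stand-in for WTF::wrapUnique: take ownership of a raw pointer
// and return it wrapped in a std::unique_ptr.
template <typename T>
std::unique_ptr<T> wrapUnique(T* ptr) {
  return std::unique_ptr<T>(ptr);
}

// Hypothetical type and factory, used only to demonstrate the call pattern
// seen throughout the examples below.
struct Widget {
  int value = 0;
};

Widget* createWidget() {
  return new Widget;  // the caller takes ownership of the raw pointer
}

int main() {
  // Typical use: wrap a factory's raw owning pointer at the call site.
  std::unique_ptr<Widget> widget = wrapUnique(createWidget());
  widget->value = 42;
  return widget->value == 42 ? 0 : 1;
}

The examples that follow all use the same shape: a raw pointer produced by a platform or factory API is handed to wrapUnique so that ownership is expressed as a std::unique_ptr from that point on.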
Example 1: m_maxPendingTicks
SharedContextRateLimiter::SharedContextRateLimiter(unsigned maxPendingTicks)
    : m_maxPendingTicks(maxPendingTicks), m_canUseSyncQueries(false) {
  m_contextProvider = wrapUnique(
      Platform::current()->createSharedOffscreenGraphicsContext3DProvider());
  if (!m_contextProvider)
    return;
  gpu::gles2::GLES2Interface* gl = m_contextProvider->contextGL();
  if (gl && gl->GetGraphicsResetStatusKHR() == GL_NO_ERROR) {
    std::unique_ptr<Extensions3DUtil> extensionsUtil =
        Extensions3DUtil::create(gl);
    // TODO(junov): when the GLES 3.0 command buffer is ready, we could use
    // fenceSync instead.
    m_canUseSyncQueries =
        extensionsUtil->supportsExtension("GL_CHROMIUM_sync_query");
  }
}
Example 2: sinkId
std::unique_ptr<WebMediaPlayer> FrameLoaderClientImpl::createWebMediaPlayer(
    HTMLMediaElement& htmlMediaElement,
    const WebMediaPlayerSource& source,
    WebMediaPlayerClient* client) {
  WebLocalFrameImpl* webFrame =
      WebLocalFrameImpl::fromFrame(htmlMediaElement.document().frame());
  if (!webFrame || !webFrame->client())
    return nullptr;
  HTMLMediaElementEncryptedMedia& encryptedMedia =
      HTMLMediaElementEncryptedMedia::from(htmlMediaElement);
  WebString sinkId(HTMLMediaElementAudioOutputDevice::sinkId(htmlMediaElement));
  return wrapUnique(webFrame->client()->createMediaPlayer(
      source, client, &encryptedMedia, encryptedMedia.contentDecryptionModule(),
      sinkId));
}
Example 3: ASSERT
void SelectorFilter::pushParent(Element& parent) {
  ASSERT(parent.document().inStyleRecalc());
  ASSERT(parent.inActiveDocument());
  if (m_parentStack.isEmpty()) {
    ASSERT(parent == parent.document().documentElement());
    ASSERT(!m_ancestorIdentifierFilter);
    m_ancestorIdentifierFilter = wrapUnique(new IdentifierFilter);
    pushParentStackFrame(parent);
    return;
  }
  ASSERT(m_ancestorIdentifierFilter);
  // We may get invoked for some random elements in some wacky cases during
  // style resolve. Pause maintaining the stack in this case.
  if (m_parentStack.last().element != parent.parentOrShadowHostElement())
    return;
  pushParentStackFrame(parent);
}
Example 4: tryGetMessageFrom
static bool tryGetMessageFrom(
    WebMessagePortChannel& webChannel,
    RefPtr<SerializedScriptValue>& message,
    std::unique_ptr<MessagePortChannelArray>& channels) {
  WebString messageString;
  WebMessagePortChannelArray webChannels;
  if (!webChannel.tryGetMessage(&messageString, webChannels))
    return false;
  if (webChannels.size()) {
    channels = wrapUnique(new MessagePortChannelArray(webChannels.size()));
    for (size_t i = 0; i < webChannels.size(); ++i)
      (*channels)[i] = WebMessagePortChannelUniquePtr(webChannels[i]);
  }
  message = SerializedScriptValue::create(messageString);
  return true;
}
Example 5: fillWithEmptyClients
void HTMLTextFormControlElementTest::SetUp() {
  Page::PageClients pageClients;
  fillWithEmptyClients(pageClients);
  m_spellCheckerClient = wrapUnique(new DummySpellCheckerClient);
  pageClients.spellCheckerClient = m_spellCheckerClient.get();
  m_dummyPageHolder = DummyPageHolder::create(IntSize(800, 600), &pageClients);
  m_document = &m_dummyPageHolder->document();
  m_document->documentElement()->setInnerHTML(
      "<body><textarea id=textarea></textarea><input id=input /></body>",
      ASSERT_NO_EXCEPTION);
  m_document->view()->updateAllLifecyclePhases();
  m_textControl =
      toHTMLTextFormControlElement(m_document->getElementById("textarea"));
  m_textControl->focus();
  m_input = toHTMLInputElement(m_document->getElementById("input"));
}
Example 6: ASSERT
std::unique_ptr<PatternData> LayoutSVGResourcePattern::buildPatternData(
    const LayoutObject& object) {
  // If we couldn't determine the pattern content element root, stop here.
  const PatternAttributes& attributes = this->attributes();
  if (!attributes.patternContentElement())
    return nullptr;
  // An empty viewBox disables layout.
  if (attributes.hasViewBox() && attributes.viewBox().isEmpty())
    return nullptr;
  ASSERT(element());
  // Compute tile metrics.
  FloatRect clientBoundingBox = object.objectBoundingBox();
  FloatRect tileBounds = SVGLengthContext::resolveRectangle(
      element(), attributes.patternUnits(), clientBoundingBox, *attributes.x(),
      *attributes.y(), *attributes.width(), *attributes.height());
  if (tileBounds.isEmpty())
    return nullptr;
  AffineTransform tileTransform;
  if (attributes.hasViewBox()) {
    if (attributes.viewBox().isEmpty())
      return nullptr;
    tileTransform = SVGFitToViewBox::viewBoxToViewTransform(
        attributes.viewBox(), attributes.preserveAspectRatio(),
        tileBounds.width(), tileBounds.height());
  } else {
    // A viewbox overrides patternContentUnits, per spec.
    if (attributes.patternContentUnits() ==
        SVGUnitTypes::kSvgUnitTypeObjectboundingbox)
      tileTransform.scale(clientBoundingBox.width(),
                          clientBoundingBox.height());
  }
  std::unique_ptr<PatternData> patternData = wrapUnique(new PatternData);
  patternData->pattern =
      Pattern::createPicturePattern(asPicture(tileBounds, tileTransform));
  // Compute pattern space transformation.
  patternData->transform.translate(tileBounds.x(), tileBounds.y());
  patternData->transform.preMultiply(attributes.patternTransform());
  return patternData;
}
Example 7: ActiveScriptWrappable
MediaRecorder::MediaRecorder(ExecutionContext* context,
                             MediaStream* stream,
                             const MediaRecorderOptions& options,
                             ExceptionState& exceptionState)
    : ActiveScriptWrappable(this),
      ActiveDOMObject(context),
      m_stream(stream),
      m_streamAmountOfTracks(stream->getTracks().size()),
      m_mimeType(options.hasMimeType() ? options.mimeType() : kDefaultMimeType),
      m_stopped(true),
      m_ignoreMutedMedia(true),
      m_audioBitsPerSecond(0),
      m_videoBitsPerSecond(0),
      m_state(State::Inactive),
      m_dispatchScheduledEventRunner(AsyncMethodRunner<MediaRecorder>::create(
          this,
          &MediaRecorder::dispatchScheduledEvent)) {
  DCHECK(m_stream->getTracks().size());
  m_recorderHandler =
      wrapUnique(Platform::current()->createMediaRecorderHandler());
  DCHECK(m_recorderHandler);
  if (!m_recorderHandler) {
    exceptionState.throwDOMException(
        NotSupportedError, "No MediaRecorder handler can be created.");
    return;
  }
  AllocateVideoAndAudioBitrates(exceptionState, context, options, stream,
                                &m_audioBitsPerSecond, &m_videoBitsPerSecond);
  const ContentType contentType(m_mimeType);
  if (!m_recorderHandler->initialize(
          this, stream->descriptor(), contentType.type(),
          contentType.parameter("codecs"), m_audioBitsPerSecond,
          m_videoBitsPerSecond)) {
    exceptionState.throwDOMException(
        NotSupportedError,
        "Failed to initialize native MediaRecorder the type provided (" +
            m_mimeType + ") is not supported.");
    return;
  }
  m_stopped = false;
}
Example 8: DCHECK
void PresentationReceiver::onReceiverConnectionAvailable(
    WebPresentationConnectionClient* connectionClient) {
  DCHECK(connectionClient);
  // take() will call PresentationReceiver::registerConnection()
  // and register the connection.
  auto connection =
      PresentationConnection::take(this, wrapUnique(connectionClient));
  // receiver.connectionList property not accessed
  if (!m_connectionListProperty)
    return;
  if (m_connectionListProperty->getState() ==
      ScriptPromisePropertyBase::Pending)
    m_connectionListProperty->resolve(m_connectionList);
  else if (m_connectionListProperty->getState() ==
           ScriptPromisePropertyBase::Resolved)
    m_connectionList->dispatchConnectionAvailableEvent(connection);
}
Example 9: wrapUnique
void PaintPropertyTreeBuilder::updateLocalBorderBoxContext(
    const LayoutObject& object,
    PaintPropertyTreeBuilderContext& context) {
  // Avoid adding an ObjectPaintProperties for non-boxes to save memory, since
  // we don't need them at the moment.
  if (!object.isBox() && !object.hasLayer())
    return;
  std::unique_ptr<ObjectPaintProperties::PropertyTreeStateWithOffset>
      borderBoxContext =
          wrapUnique(new ObjectPaintProperties::PropertyTreeStateWithOffset(
              context.current.paintOffset,
              PropertyTreeState(context.current.transform, context.current.clip,
                                context.currentEffect,
                                context.current.scroll)));
  object.getMutableForPainting()
      .ensurePaintProperties()
      .setLocalBorderBoxProperties(std::move(borderBoxContext));
}
Example 10: TEST
TEST(ImageDecoderTest, clearCacheExceptFramePreverveClearExceptFrame)
{
    const size_t numFrames = 10;
    std::unique_ptr<TestImageDecoder> decoder(wrapUnique(new TestImageDecoder()));
    decoder->initFrames(numFrames);
    Vector<ImageFrame, 1>& frameBuffers = decoder->frameBufferCache();
    for (size_t i = 0; i < numFrames; ++i)
        frameBuffers[i].setStatus(ImageFrame::FrameComplete);
    decoder->resetRequiredPreviousFrames();
    decoder->clearCacheExceptFrame(5);
    for (size_t i = 0; i < numFrames; ++i) {
        SCOPED_TRACE(testing::Message() << i);
        if (i == 5)
            EXPECT_EQ(ImageFrame::FrameComplete, frameBuffers[i].getStatus());
        else
            EXPECT_EQ(ImageFrame::FrameEmpty, frameBuffers[i].getStatus());
    }
}
Example 11: DCHECK
void WebSharedWorkerImpl::didFinishDocumentLoad(WebLocalFrame* frame) {
  DCHECK(!m_loadingDocument);
  DCHECK(!m_mainScriptLoader);
  m_networkProvider = wrapUnique(
      m_client->createServiceWorkerNetworkProvider(frame->dataSource()));
  m_mainScriptLoader = WorkerScriptLoader::create();
  m_mainScriptLoader->setRequestContext(
      WebURLRequest::RequestContextSharedWorker);
  m_loadingDocument = toWebLocalFrameImpl(frame)->frame()->document();
  m_mainScriptLoader->loadAsynchronously(
      *m_loadingDocument.get(), m_url, DenyCrossOriginRequests,
      m_creationAddressSpace,
      bind(&WebSharedWorkerImpl::didReceiveScriptLoaderResponse,
           WTF::unretained(this)),
      bind(&WebSharedWorkerImpl::onScriptLoaderFinished,
           WTF::unretained(this)));
  // Do nothing here since onScriptLoaderFinished() might have been already
  // invoked and |this| might have been deleted at this point.
}
Example 12: m_options
inline SearchBuffer::SearchBuffer(const String& target, FindOptions options)
    : m_options(options),
      m_prefixLength(0),
      m_numberOfCharactersJustAppended(0),
      m_atBreak(true),
      m_needsMoreContext(options & AtWordStarts),
      m_targetRequiresKanaWorkaround(containsKanaLetters(target)) {
  DCHECK(!target.isEmpty()) << target;
  target.appendTo(m_target);
  // FIXME: We'd like to tailor the searcher to fold quote marks for us instead
  // of doing it in a separate replacement pass here, but ICU doesn't offer a
  // way to add tailoring on top of the locale-specific tailoring as of this
  // writing.
  foldQuoteMarksAndSoftHyphens(m_target.data(), m_target.size());
  size_t targetLength = m_target.size();
  m_buffer.reserveInitialCapacity(
      std::max(targetLength * 8, kMinimumSearchBufferSize));
  m_overlap = m_buffer.capacity() / 4;
  if ((m_options & AtWordStarts) && targetLength) {
    const UChar32 targetFirstCharacter =
        getCodePointAt(m_target.data(), 0, targetLength);
    // Characters in the separator category never really occur at the beginning
    // of a word, so if the target begins with such a character, we just ignore
    // the AtWordStart option.
    if (isSeparator(targetFirstCharacter)) {
      m_options &= ~AtWordStarts;
      m_needsMoreContext = false;
    }
  }
  m_textSearcher = wrapUnique(new TextSearcherICU());
  m_textSearcher->setPattern(StringView(m_target.data(), m_target.size()),
                             !(m_options & CaseInsensitive));
  // The kana workaround requires a normalized copy of the target string.
  if (m_targetRequiresKanaWorkaround)
    normalizeCharactersIntoNFCForm(m_target.data(), m_target.size(),
                                   m_normalizedTarget);
}
Example 13: wrapUnique
bool SerializedScriptValueReaderForModules::readRTCCertificate(
    v8::Local<v8::Value>* value) {
  String pemPrivateKey;
  if (!readWebCoreString(&pemPrivateKey))
    return false;
  String pemCertificate;
  if (!readWebCoreString(&pemCertificate))
    return false;
  std::unique_ptr<WebRTCCertificateGenerator> certificateGenerator =
      wrapUnique(Platform::current()->createRTCCertificateGenerator());
  std::unique_ptr<WebRTCCertificate> certificate(
      certificateGenerator->fromPEM(pemPrivateKey, pemCertificate));
  RTCCertificate* jsCertificate = new RTCCertificate(std::move(certificate));
  *value =
      toV8(jsCertificate, getScriptState()->context()->Global(), isolate());
  return !value->IsEmpty();
}
Example 14: wrapUnique
RTCDTMFSender* RTCDTMFSender::create(
    ExecutionContext* context,
    WebRTCPeerConnectionHandler* peerConnectionHandler,
    MediaStreamTrack* track,
    ExceptionState& exceptionState) {
  std::unique_ptr<WebRTCDTMFSenderHandler> handler =
      wrapUnique(peerConnectionHandler->createDTMFSender(track->component()));
  if (!handler) {
    exceptionState.throwDOMException(NotSupportedError,
                                     "The MediaStreamTrack provided is not an "
                                     "element of a MediaStream that's "
                                     "currently in the local streams set.");
    return nullptr;
  }
  RTCDTMFSender* dtmfSender =
      new RTCDTMFSender(context, track, std::move(handler));
  dtmfSender->suspendIfNeeded();
  return dtmfSender;
}
Example 15: wrapUnique
void PaintLayerStackingNode::collectLayers(
    std::unique_ptr<Vector<PaintLayerStackingNode*>>& posBuffer,
    std::unique_ptr<Vector<PaintLayerStackingNode*>>& negBuffer) {
  if (layer()->isInTopLayer())
    return;
  if (isStacked()) {
    std::unique_ptr<Vector<PaintLayerStackingNode*>>& buffer =
        (zIndex() >= 0) ? posBuffer : negBuffer;
    if (!buffer)
      buffer = wrapUnique(new Vector<PaintLayerStackingNode*>);
    buffer->append(this);
  }
  if (!isStackingContext()) {
    for (PaintLayer* child = layer()->firstChild(); child;
         child = child->nextSibling())
      child->stackingNode()->collectLayers(posBuffer, negBuffer);
  }
}