This page collects typical usage examples of the PHP method LinksUpdate::queueRecursiveJobsForTable. If you are wondering what LinksUpdate::queueRecursiveJobsForTable does or how to call it, the curated code examples below should help. You can also read further about the containing class, LinksUpdate.
The following shows 5 code examples of LinksUpdate::queueRecursiveJobsForTable, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better PHP code examples.
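For orientation before the full examples: the method is a static helper that takes a Title object and the name of a backlink table. A minimal usage sketch, hedged and based only on how the examples below call it (the page name is illustrative, and a MediaWiki ~1.25–1.27 environment is assumed):

// Minimal usage sketch; 'Template:Example' is a hypothetical page.
$title = Title::newFromText('Template:Example');
// Queue recursive RefreshLinks jobs for every page that links to $title
// through the given backlink table ('templatelinks' here).
LinksUpdate::queueRecursiveJobsForTable($title, 'templatelinks');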
Example 1: doDeleteUpdates
/**
 * Do some database updates after deletion
 *
 * @param int $id The page_id value of the page being deleted
 * @param Content|null $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 * @param Revision|null $revision The latest page revision
 */
public function doDeleteUpdates($id, Content $content = null, Revision $revision = null)
{
    try {
        $countable = $this->isCountable();
    } catch (Exception $ex) {
        // fallback for deleting broken pages for which we cannot load the content for
        // some reason. Note that doDeleteArticleReal() already logged this problem.
        $countable = false;
    }
    // Update site status
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $countable, -1));
    // Delete pagelinks, update secondary indexes, etc
    $updates = $this->getDeletionUpdates($content);
    foreach ($updates as $update) {
        DeferredUpdates::addUpdate($update);
    }
    // Reparse any pages transcluding this page
    LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'templatelinks');
    // Reparse any pages including this image
    if ($this->mTitle->getNamespace() == NS_FILE) {
        LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'imagelinks');
    }
    // Clear caches
    WikiPage::onArticleDelete($this->mTitle);
    ResourceLoaderWikiModule::invalidateModuleCache($this->mTitle, $revision, null, wfWikiID());
    // Reset this object and the Title object
    $this->loadFromRow(false, self::READ_LATEST);
    // Search engine
    DeferredUpdates::addUpdate(new SearchUpdate($id, $this->mTitle));
}
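For context on what the two queueRecursiveJobsForTable() calls above actually do: in the MediaWiki core these examples come from, the method checks the backlink cache and, if anything links to the page through the given table, pushes a recursive RefreshLinksJob onto the job queue. The sketch below is a hedged reconstruction of that behaviour (an approximation of that era's core, not the exact source), assuming $title is a Title and $table a backlink table name:

// Hedged reconstruction of what the call does internally (approximate).
if ($title->getBacklinkCache()->hasLinks($table)) {
    $job = new RefreshLinksJob($title, array(
        'table' => $table,
        'recursive' => true,
    ) + Job::newRootJobParams("refreshlinks:{$table}:{$title->getPrefixedText()}"));
    // Root-job params let the job queue de-duplicate repeated recursive requests.
    JobQueueGroup::singleton()->push($job);
}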
Example 2: recordUpload2
//......... part of this method omitted here .........
# version to continue anyway, because that's better than having
# an image that's not fixable by user operations.
$reupload = true;
# Collision, this is an update of a file
# Insert previous contents into oldimage
$dbw->insertSelect('oldimage', 'image', array('oi_name' => 'img_name', 'oi_archive_name' => $dbw->addQuotes($oldver), 'oi_size' => 'img_size', 'oi_width' => 'img_width', 'oi_height' => 'img_height', 'oi_bits' => 'img_bits', 'oi_timestamp' => 'img_timestamp', 'oi_description' => 'img_description', 'oi_user' => 'img_user', 'oi_user_text' => 'img_user_text', 'oi_metadata' => 'img_metadata', 'oi_media_type' => 'img_media_type', 'oi_major_mime' => 'img_major_mime', 'oi_minor_mime' => 'img_minor_mime', 'oi_sha1' => 'img_sha1'), array('img_name' => $this->getName()), __METHOD__);
# Update the current image row
$dbw->update('image', array('img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $dbw->encodeBlob($this->metadata), 'img_sha1' => $this->sha1), array('img_name' => $this->getName()), __METHOD__);
} else {
# This is a new file, so update the image count
DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('images' => 1)));
}
$descTitle = $this->getTitle();
$wikiPage = new WikiFilePage($descTitle);
$wikiPage->setFile($this);
# Add the log entry
$action = $reupload ? 'overwrite' : 'upload';
$logEntry = new ManualLogEntry('upload', $action);
$logEntry->setPerformer($user);
$logEntry->setComment($comment);
$logEntry->setTarget($descTitle);
// Allow people using the api to associate log entries with the upload.
// Log has a timestamp, but sometimes different from upload timestamp.
$logEntry->setParameters(array('img_sha1' => $this->sha1, 'img_timestamp' => $timestamp));
// Note we keep $logId around since during new image
// creation, page doesn't exist yet, so log_page = 0
// but we want it to point to the page we're making,
// so we later modify the log entry.
// For a similar reason, we avoid making an RC entry
// now and wait until the page exists.
$logId = $logEntry->insert();
$exists = $descTitle->exists();
if ($exists) {
// Page exists, do RC entry now (otherwise we wait for later).
$logEntry->publish($logId);
}
wfProfileIn(__METHOD__ . '-edit');
if ($exists) {
# Create a null revision
$latest = $descTitle->getLatestRevID();
$editSummary = LogFormatter::newFromEntry($logEntry)->getPlainActionText();
$nullRevision = Revision::newNullRevision($dbw, $descTitle->getArticleID(), $editSummary, false);
if (!is_null($nullRevision)) {
$nullRevision->insertOn($dbw);
wfRunHooks('NewRevisionFromEditComplete', array($wikiPage, $nullRevision, $latest, $user));
$wikiPage->updateRevisionOn($dbw, $nullRevision);
}
}
# Commit the transaction now, in case something goes wrong later
# The most important thing is that files don't get lost, especially archives
# NOTE: once we have support for nested transactions, the commit may be moved
# to after $wikiPage->doEdit has been called.
$dbw->commit(__METHOD__);
# Save to memcache.
# We shall not saveToCache before the commit since otherwise
# in case of a rollback there is a usable file from memcached
# which in fact doesn't really exist (bug 24978)
$this->saveToCache();
if ($exists) {
# Invalidate the cache for the description page
$descTitle->invalidateCache();
$descTitle->purgeSquid();
} else {
# New file; create the description page.
# There's already a log entry, so don't make a second RC entry
# Squid and file cache for the description page are purged by doEditContent.
$content = ContentHandler::makeContent($pageText, $descTitle);
$status = $wikiPage->doEditContent($content, $comment, EDIT_NEW | EDIT_SUPPRESS_RC, false, $user);
$dbw->begin(__METHOD__);
// XXX; doEdit() uses a transaction
// Now that the page exists, make an RC entry.
$logEntry->publish($logId);
if (isset($status->value['revision'])) {
$dbw->update('logging', array('log_page' => $status->value['revision']->getPage()), array('log_id' => $logId), __METHOD__);
}
$dbw->commit(__METHOD__);
// commit before anything bad can happen
}
wfProfileOut(__METHOD__ . '-edit');
if ($reupload) {
# Delete old thumbnails
wfProfileIn(__METHOD__ . '-purge');
$this->purgeThumbnails();
wfProfileOut(__METHOD__ . '-purge');
# Remove the old file from the squid cache
SquidUpdate::purge(array($this->getURL()));
}
# Hooks, hooks, the magic of hooks...
wfProfileIn(__METHOD__ . '-hooks');
wfRunHooks('FileUpload', array($this, $reupload, $descTitle->exists()));
wfProfileOut(__METHOD__ . '-hooks');
# Invalidate cache for all pages using this file
$update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
$update->doUpdate();
if (!$reupload) {
LinksUpdate::queueRecursiveJobsForTable($this->getTitle(), 'imagelinks');
}
wfProfileOut(__METHOD__);
return true;
}
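The tail of this example pairs two different invalidations: HTMLCacheUpdate purges the cached HTML of pages already recorded in imagelinks, while queueRecursiveJobsForTable additionally reparses those pages and is only needed when the file is newly created rather than re-uploaded. A hedged sketch of that pairing as a standalone helper (the helper name and parameters are illustrative, not MediaWiki API):

/**
 * Illustrative helper (hypothetical, not part of MediaWiki core): invalidate
 * pages embedding a file, mirroring the tail of recordUpload2() above.
 */
function invalidateFileEmbedders(File $file, $isReupload) {
    // Purge cached HTML for every page listed in imagelinks for this file.
    $update = new HTMLCacheUpdate($file->getTitle(), 'imagelinks');
    $update->doUpdate();
    if (!$isReupload) {
        // Newly created file: also queue recursive RefreshLinks jobs so the
        // embedding pages are reparsed (the "!$reupload" branch above).
        LinksUpdate::queueRecursiveJobsForTable($file->getTitle(), 'imagelinks');
    }
}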
Example 3: recordUpload2
//......... part of this method omitted here .........
$logEntry->setComment($comment);
$logEntry->setTarget($descTitle);
// Allow people using the api to associate log entries with the upload.
// Log has a timestamp, but sometimes different from upload timestamp.
$logEntry->setParameters(array('img_sha1' => $this->sha1, 'img_timestamp' => $timestamp));
// Note we keep $logId around since during new image
// creation, page doesn't exist yet, so log_page = 0
// but we want it to point to the page we're making,
// so we later modify the log entry.
// For a similar reason, we avoid making an RC entry
// now and wait until the page exists.
$logId = $logEntry->insert();
if ($descTitle->exists()) {
// Use own context to get the action text in content language
$formatter = LogFormatter::newFromEntry($logEntry);
$formatter->setContext(RequestContext::newExtraneousContext($descTitle));
$editSummary = $formatter->getPlainActionText();
$nullRevision = Revision::newNullRevision($dbw, $descId, $editSummary, false, $user);
if ($nullRevision) {
$nullRevision->insertOn($dbw);
Hooks::run('NewRevisionFromEditComplete', array($wikiPage, $nullRevision, $nullRevision->getParentId(), $user));
$wikiPage->updateRevisionOn($dbw, $nullRevision);
// Associate null revision id
$logEntry->setAssociatedRevId($nullRevision->getId());
}
$newPageContent = null;
} else {
// Make the description page and RC log entry post-commit
$newPageContent = ContentHandler::makeContent($pageText, $descTitle);
}
# Defer purges, page creation, and link updates in case they error out.
# The most important thing is that files and the DB registry stay synced.
$dbw->endAtomic(__METHOD__);
# Do some cache purges after final commit so that:
# a) Changes are more likely to be seen post-purge
# b) They won't cause rollback of the log publish/update above
$that = $this;
$dbw->onTransactionIdle(function () use($that, $reupload, $wikiPage, $newPageContent, $comment, $user, $logEntry, $logId, $descId, $tags) {
# Update memcache after the commit
$that->invalidateCache();
$updateLogPage = false;
if ($newPageContent) {
# New file page; create the description page.
# There's already a log entry, so don't make a second RC entry
# CDN and file cache for the description page are purged by doEditContent.
$status = $wikiPage->doEditContent($newPageContent, $comment, EDIT_NEW | EDIT_SUPPRESS_RC, false, $user);
if (isset($status->value['revision'])) {
// Associate new page revision id
$logEntry->setAssociatedRevId($status->value['revision']->getId());
}
// This relies on the resetArticleID() call in WikiPage::insertOn(),
// which is triggered on $descTitle by doEditContent() above.
if (isset($status->value['revision'])) {
/** @var $rev Revision */
$rev = $status->value['revision'];
$updateLogPage = $rev->getPage();
}
} else {
# Existing file page: invalidate description page cache
$wikiPage->getTitle()->invalidateCache();
$wikiPage->getTitle()->purgeSquid();
# Allow the new file version to be patrolled from the page footer
Article::purgePatrolFooterCache($descId);
}
# Update associated rev id. This should be done by $logEntry->insert() earlier,
# but setAssociatedRevId() wasn't called at that point yet...
$logParams = $logEntry->getParameters();
$logParams['associated_rev_id'] = $logEntry->getAssociatedRevId();
$update = array('log_params' => LogEntryBase::makeParamBlob($logParams));
if ($updateLogPage) {
# Also log page, in case where we just created it above
$update['log_page'] = $updateLogPage;
}
$that->getRepo()->getMasterDB()->update('logging', $update, array('log_id' => $logId), __METHOD__);
$that->getRepo()->getMasterDB()->insert('log_search', array('ls_field' => 'associated_rev_id', 'ls_value' => $logEntry->getAssociatedRevId(), 'ls_log_id' => $logId), __METHOD__);
# Now that the log entry is up-to-date, make an RC entry.
$recentChange = $logEntry->publish($logId);
if ($tags) {
ChangeTags::addTags($tags, $recentChange ? $recentChange->getAttribute('rc_id') : null, $logEntry->getAssociatedRevId(), $logId);
}
# Run hook for other updates (typically more cache purging)
Hooks::run('FileUpload', array($that, $reupload, !$newPageContent));
if ($reupload) {
# Delete old thumbnails
$that->purgeThumbnails();
# Remove the old file from the CDN cache
DeferredUpdates::addUpdate(new CdnCacheUpdate(array($that->getUrl())), DeferredUpdates::PRESEND);
} else {
# Update backlink pages pointing to this title if created
LinksUpdate::queueRecursiveJobsForTable($that->getTitle(), 'imagelinks');
}
});
if (!$reupload) {
# This is a new file, so update the image count
DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('images' => 1)));
}
# Invalidate cache for all pages using this file
DeferredUpdates::addUpdate(new HTMLCacheUpdate($this->getTitle(), 'imagelinks'));
return true;
}
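Both this example and example 5 below wrap the job enqueueing in onTransactionIdle(), so the recursive jobs are only pushed after the surrounding master transaction has committed. A minimal hedged sketch of that deferral pattern, assuming $title is the Title of the affected page:

// Hedged sketch of the post-commit deferral used above.
$dbw = wfGetDB(DB_MASTER);
$dbw->onTransactionIdle(function () use ($title) {
    // Runs only after the transaction commits, so the queued RefreshLinks
    // jobs observe the committed state rather than an open transaction.
    LinksUpdate::queueRecursiveJobsForTable($title, 'imagelinks');
});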
Example 4: doDeleteUpdates
/**
 * Do some database updates after deletion
 *
 * @param int $id The page_id value of the page being deleted
 * @param Content $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 */
public function doDeleteUpdates($id, Content $content = null)
{
    // Update site status
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));
    // Delete pagelinks, update secondary indexes, etc
    $updates = $this->getDeletionUpdates($content);
    foreach ($updates as $update) {
        DeferredUpdates::addUpdate($update);
    }
    // Reparse any pages transcluding this page
    LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'templatelinks');
    // Reparse any pages including this image
    if ($this->mTitle->getNamespace() == NS_FILE) {
        LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'imagelinks');
    }
    // Clear caches
    WikiPage::onArticleDelete($this->mTitle);
    // Reset this object and the Title object
    $this->loadFromRow(false, self::READ_LATEST);
    // Search engine
    DeferredUpdates::addUpdate(new SearchUpdate($id, $this->mTitle));
}
Example 5: doDeleteUpdates
/**
 * Do some database updates after deletion
 *
 * @param int $id The page_id value of the page being deleted
 * @param Content $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 */
public function doDeleteUpdates($id, Content $content = null)
{
    // Update site status
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));
    // Delete pagelinks, update secondary indexes, etc
    $updates = $this->getDeletionUpdates($content);
    // Make sure any enqueued jobs run after commit so they see the deletion
    wfGetDB(DB_MASTER)->onTransactionIdle(function () use ($updates) {
        DataUpdate::runUpdates($updates, 'enqueue');
    });
    // Reparse any pages transcluding this page
    LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'templatelinks');
    // Reparse any pages including this image
    if ($this->mTitle->getNamespace() == NS_FILE) {
        LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'imagelinks');
    }
    // Clear caches
    WikiPage::onArticleDelete($this->mTitle);
    // Reset this object and the Title object
    $this->loadFromRow(false, self::READ_LATEST);
    // Search engine
    DeferredUpdates::addUpdate(new SearchUpdate($id, $this->mTitle));
}