This article collects typical usage examples of the PHP method Title::newFromIDs. If you have been wondering what exactly Title::newFromIDs does and how to call it, the curated code samples below should help. You can also explore the Title
class, which defines this method, in more depth.
In total, 13 code examples of Title::newFromIDs are shown below, ordered by popularity by default.
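As a quick orientation before the collected examples: Title::newFromIDs() takes an array of page IDs and returns Title objects only for pages that actually exist; unknown IDs are silently dropped. A minimal sketch, assuming a bootstrapped MediaWiki environment and placeholder IDs:
$pageIds = array( 1, 42, 999999 ); // 999999 may not correspond to an existing page
$titles = Title::newFromIDs( $pageIds );
foreach ( $titles as $title ) {
    // Only existing pages are returned; missing IDs are simply skipped.
    echo $title->getPrefixedText() . "\n";
}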
Example 1: purge
/**
* Purge the cache for pages containing gallery tags
*/
public function purge()
{
global $wgUseFileCache, $wgUseSquid;
$totalGalleryPageCount = 0; // keeps track of actual existing titles with gallery
$dbGalleryCount = $this->getGalleryPageCount(); // all counts, including those with missing titles
// Paginate the operation to prevent db/memory overload
for ($limitCount = 0; $limitCount < $dbGalleryCount; $limitCount += self::PAGE_COUNT_LIMIT) {
$galleryPageIds = $this->getGalleryPageIds($limitCount);
$galleryPageTitles = \Title::newFromIDs($galleryPageIds);
$galleryPageCount = count($galleryPageTitles);
// skip this batch if no pages were found
if ($galleryPageCount == 0) {
continue;
}
// Update squid/varnish/parser cache
if ($wgUseSquid) {
foreach ($galleryPageTitles as $title) {
$title->purgeSquid();
}
}
// Update file cache if used
if ($wgUseFileCache) {
foreach ($galleryPageTitles as $title) {
\HTMLFileCache::clearFileCache($title);
}
}
$totalGalleryPageCount += $galleryPageCount;
}
$this->info('Gallery page purge request', ['title' => __METHOD__, 'count' => $totalGalleryPageCount]);
return $totalGalleryPageCount;
}
Example 2: getAverageLength
function getAverageLength($ids)
{
    // Title::newFromIDs() silently drops IDs of pages that do not exist,
    // so $titles may contain fewer entries than $ids (or none at all).
    $titles = Title::newFromIDs($ids);
    if (count($titles) === 0) {
        return 0; // avoid division by zero when no page IDs resolve
    }
    $len = 0;
    foreach ($titles as $title) {
        $len += strlen($title->getText()); // byte length of the title text
    }
    return $len / count($titles);
}
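If character length rather than byte length is wanted (page titles are UTF-8, so strlen counts multibyte characters as several bytes), a variant based on mb_strlen can be used. This is a hypothetical helper for illustration, not part of the original example:
function getAverageCharLength($ids)
{
    $titles = Title::newFromIDs($ids); // missing page IDs are dropped here as well
    if (count($titles) === 0) {
        return 0; // no existing pages, nothing to average
    }
    $len = 0;
    foreach ($titles as $title) {
        $len += mb_strlen($title->getText(), 'UTF-8'); // character count, not bytes
    }
    return $len / count($titles);
}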
Example 3: getTargets
public function getTargets()
{
if ($this->mTo) {
return $this->mTo;
}
$dbr = wfGetDB(DB_SLAVE);
$ns = $this->mFrom->getNamespace();
$t = $this->mFrom->getDBkey();
$res = $dbr->select('indexes', 'in_from', array('in_namespace' => $ns, 'in_title' => $t), __METHOD__);
$ids = array();
foreach ($res as $row) {
$ids[] = $row->in_from;
}
$this->mTo = Title::newFromIDs($ids);
return $this->mTo;
}
Example 4: refreshData
/**
* @see SMWStore::refreshData
*
* @param integer $index
* @param integer $count
* @param mixed $namespaces Array or false
* @param boolean $usejobs
*
* @return float Value between 0 and 1 indicating the overall progress of the refresh
*/
public function refreshData(&$index, $count, $namespaces = false, $usejobs = true)
{
$updatejobs = array();
$emptyrange = true; // was nothing done in this run?
// Update by MediaWiki page id --> make sure we get all pages.
$tids = array(); // array of ids
for ($i = $index; $i < $index + $count; $i++) {
$tids[] = $i;
}
$titles = Title::newFromIDs($tids);
foreach ($titles as $title) {
if ($namespaces == false || in_array($title->getNamespace(), $namespaces)) {
$updatejobs[] = new SMWUpdateJob($title);
$emptyrange = false;
}
}
// update by internal SMW id --> make sure we get all objects in SMW
$dbr = wfGetDB(DB_SLAVE);
$res = $dbr->select('smw_ids', array('smw_id', 'smw_title', 'smw_namespace', 'smw_iw', 'smw_subobject'), array("smw_id >= {$index} ", " smw_id < " . $dbr->addQuotes($index + $count)), __METHOD__);
foreach ($res as $row) {
$emptyrange = false; // note this even if no jobs were created
if ($namespaces && !in_array($row->smw_namespace, $namespaces)) {
continue;
}
if ($row->smw_subobject !== '') {
// leave subobjects alone; they ought to be changed with their pages
} elseif ($row->smw_iw === '' || $row->smw_iw == SMW_SQL2_SMWREDIIW) {
// objects representing pages
// TODO: special treament of redirects needed, since the store will
// not act on redirects that did not change according to its records
$title = Title::makeTitleSafe($row->smw_namespace, $row->smw_title);
if ($title !== null && !$title->exists()) {
$updatejobs[] = new SMWUpdateJob($title);
}
} elseif ($row->smw_iw == SMW_SQL2_SMWIW_OUTDATED) {
// remove outdated internal object references
foreach (self::getPropertyTables() as $proptable) {
if ($proptable->idsubject) {
$dbr->delete($proptable->name, array('s_id' => $row->smw_id), __METHOD__);
}
}
$dbr->delete('smw_ids', array('smw_id' => $row->smw_id), __METHOD__);
} else {
// "normal" interwiki pages or outdated internal objects
$diWikiPage = new SMWDIWikiPage($row->smw_title, $row->smw_namespace, $row->smw_iw);
$this->deleteSemanticData($diWikiPage);
}
}
$dbr->freeResult($res);
wfRunHooks('smwRefreshDataJobs', array(&$updatejobs));
if ($usejobs) {
Job::batchInsert($updatejobs);
} else {
foreach ($updatejobs as $job) {
$job->run();
}
}
$nextpos = $index + $count;
if ($emptyrange) {
// nothing found, check if there will be more pages later on
$next1 = $dbr->selectField('page', 'page_id', "page_id >= {$nextpos}", __METHOD__, array('ORDER BY' => "page_id ASC"));
$next2 = $dbr->selectField('smw_ids', 'smw_id', "smw_id >= {$nextpos}", __METHOD__, array('ORDER BY' => "smw_id ASC"));
$nextpos = $next2 != 0 && $next2 < $next1 ? $next2 : $next1;
}
$max1 = $dbr->selectField('page', 'MAX(page_id)', '', __METHOD__);
$max2 = $dbr->selectField('smw_ids', 'MAX(smw_id)', '', __METHOD__);
$index = $nextpos ? $nextpos : -1;
return $index > 0 ? $index / max($max1, $max2) : 1;
}
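As the docblock above suggests, refreshData() is meant to be driven in a loop: $index is advanced by reference after each call and the return value reports overall progress. A minimal driver sketch, assuming $store is an instance of this store class and an arbitrary batch size of 100:
$index = 1;   // first page/object id to process
$count = 100; // batch size per call (assumption)
do {
    // false, false: refresh all namespaces and run the update jobs immediately
    $progress = $store->refreshData( $index, $count, false, false );
} while ( $progress < 1 && $index > 0 );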
Example 5: invalidateIDs
/**
* Invalidate a set of IDs, right now
*/
function invalidateIDs(ResultWrapper $res)
{
global $wgUseFileCache, $wgUseSquid;
if ($res->numRows() == 0) {
return;
}
$dbw =& wfGetDB(DB_MASTER);
$timestamp = $dbw->timestamp();
$done = false;
while (!$done) {
# Get all IDs in this query into an array
$ids = array();
for ($i = 0; $i < $this->mRowsPerQuery; $i++) {
$row = $res->fetchRow();
if ($row) {
$ids[] = $row[0];
} else {
$done = true;
break;
}
}
if (!count($ids)) {
break;
}
# Update page_touched
$dbw->update('page', array('page_touched' => $timestamp), array('page_id IN (' . $dbw->makeList($ids) . ')'), __METHOD__);
# Update squid
if ($wgUseSquid || $wgUseFileCache) {
$titles = Title::newFromIDs($ids);
if ($wgUseSquid) {
$u = SquidUpdate::newFromTitles($titles);
$u->doUpdate();
}
# Update file cache
if ($wgUseFileCache) {
foreach ($titles as $title) {
$cm = new CacheManager($title);
@unlink($cm->fileCacheName());
}
}
}
}
}
Example 6: getArticlesDetails
protected function getArticlesDetails($articleIds, $articleKeys = [], $width = 0, $height = 0, $abstract = 0, $strict = false)
{
$articles = is_array($articleIds) ? $articleIds : [$articleIds];
$ids = [];
$collection = [];
$resultingCollectionIds = [];
$titles = [];
foreach ($articles as $i) {
//data is cached on a per-article basis
//to avoid one article requiring purging
//the whole collection
$cache = $this->wg->Memc->get(self::getCacheKey($i, self::DETAILS_CACHE_ID));
if (!is_array($cache)) {
$ids[] = $i;
} else {
$collection[$i] = $cache;
$resultingCollectionIds[] = $i;
}
}
if (count($ids) > 0) {
$titles = Title::newFromIDs($ids);
}
if (!empty($articleKeys)) {
foreach ($articleKeys as $titleKey) {
$titleObj = Title::newFromDbKey($titleKey);
if ($titleObj instanceof Title && $titleObj->exists()) {
$titles[] = $titleObj;
}
}
}
if (!empty($titles)) {
foreach ($titles as $t) {
$fileData = [];
if ($t->getNamespace() == NS_FILE) {
$fileData = $this->getFromFile($t->getText());
} elseif ($t->getNamespace() == NS_MAIN) {
$fileData = ['type' => static::ARTICLE_TYPE];
} elseif ($t->getNamespace() == NS_CATEGORY) {
$fileData = ['type' => static::CATEGORY_TYPE];
}
$id = $t->getArticleID();
$revId = $t->getLatestRevID();
$rev = Revision::newFromId($revId);
if (!empty($rev)) {
$collection[$id] = ['id' => $id, 'title' => $t->getText(), 'ns' => $t->getNamespace(), 'url' => $t->getLocalURL(), 'revision' => ['id' => $revId, 'user' => $rev->getUserText(Revision::FOR_PUBLIC), 'user_id' => $rev->getUser(Revision::FOR_PUBLIC), 'timestamp' => wfTimestamp(TS_UNIX, $rev->getTimestamp())]];
$collection[$id]['comments'] = class_exists('ArticleCommentList') ? ArticleCommentList::newFromTitle($t)->getCountAllNested() : false;
//add file data
$collection[$id] = array_merge($collection[$id], $fileData);
$resultingCollectionIds[] = $id;
$this->wg->Memc->set(self::getCacheKey($id, self::DETAILS_CACHE_ID), $collection[$id], 86400);
} else {
$dataLog = ['titleText' => $t->getText(), 'articleId' => $t->getArticleID(), 'revId' => $revId];
WikiaLogger::instance()->info('No revision found for article', $dataLog);
}
}
$titles = null;
}
//ImageServing has separate caching
//so processing it separately allows to
//make the thumbnail's size parametrical without
//invalidating the titles details' cache
//or the need to duplicate it
$thumbnails = $this->getArticlesThumbnails($resultingCollectionIds, $width, $height);
$articles = null;
//ArticleService has separate caching
//so processing it separately allows to
//make the length parametrical without
//invalidating the titles details' cache
//or the need to duplicate it
foreach ($collection as $id => &$details) {
if ($abstract > 0) {
$as = new ArticleService($id);
$snippet = $as->getTextSnippet($abstract);
} else {
$snippet = null;
}
$details['abstract'] = $snippet;
if (isset($thumbnails[$id])) {
$details = array_merge($details, $thumbnails[$id]);
}
}
$collection = $this->appendMetadata($collection);
$thumbnails = null;
//The collection can be in random order (depends if item was found in memcache or not)
//lets preserve original order even if we are not using strict mode:
//to keep things consistent over time (some other APIs that are using sorted results are using
//ArticleApi::getDetails to fetch info about articles)
$orderedIdsFromTitles = array_diff(array_keys($collection), $articleIds);
//typecasting to convert falsy values into empty array (array_merge require arrays only)
$orderedIds = array_merge((array) $articleIds, (array) $orderedIdsFromTitles);
$collection = $this->preserveOriginalOrder($orderedIds, $collection);
//if strict - return array instead of associative array (dict)
if ($strict) {
return array_values($collection);
} else {
return $collection;
}
}
Example 7: refreshData
public function refreshData(&$index, $count, $namespaces = false, $usejobs = true)
{
$updatejobs = array();
$emptyrange = true; // was nothing found in this run?
// update by MediaWiki page id --> make sure we get all pages
$tids = array();
for ($i = $index; $i < $index + $count; $i++) { // array of ids
$tids[] = $i;
}
$titles = Title::newFromIDs($tids);
foreach ($titles as $title) {
if ($namespaces == false || in_array($title->getNamespace(), $namespaces)) {
// wikia change start - jobqueue migration
$task = new \Wikia\Tasks\Tasks\JobWrapperTask();
$task->call('SMWUpdateJob', $title);
$updatejobs[] = $task;
// wikia change end
$emptyrange = false;
}
}
wfRunHooks('smwRefreshDataJobs', array(&$updatejobs));
if ($usejobs) {
// wikia change start - jobqueue migration
\Wikia\Tasks\Tasks\BaseTask::batch($updatejobs);
// wikia change end
} else {
foreach ($updatejobs as $job) {
// wikia change start - jobqueue migration
/** @var \Wikia\Tasks\Tasks\JobWrapperTask $job */
try {
$job->init();
} catch (Exception $e) {
continue;
}
$job->wrap('SMWUpdateJob');
// wikia change end
}
}
$db = wfGetDB(DB_SLAVE);
$nextpos = $index + $count;
if ($emptyrange) {
// nothing found, check if there will be more pages later on
$nextpos = $db->selectField('page', 'page_id', "page_id >= {$nextpos}", __METHOD__, array('ORDER BY' => "page_id ASC"));
}
$maxpos = $db->selectField('page', 'MAX(page_id)', '', __METHOD__);
$index = $nextpos ? $nextpos : -1;
return $index > 0 ? $index / $maxpos : 1;
}
Example 8: getArtistTitlesFromIds
/**
* @desc Creates Title instances for the given page IDs and returns an array of the titles' texts
*
* @param array $pages Array of page IDs
* @return array Array of title texts
*/
public function getArtistTitlesFromIds($pages)
{
$result = [];
$artistsTitles = Title::newFromIDs($pages);
foreach ($artistsTitles as $artistTitle) {
$result[] = $artistTitle->getText();
}
return $result;
}
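A short usage sketch for the helper above; $helper and the page IDs are placeholders for illustration:
$pages = array( 123, 456, 789 ); // hypothetical page IDs
$artistNames = $helper->getArtistTitlesFromIds( $pages );
// e.g. array( 'Some Artist', 'Another Artist' ); IDs without an existing page are skipped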
Example 9: refreshData
/**
* @see SMWStore::refreshData
*
* @todo This method will be overhauled in SMW 1.9 to become cleaner
* and more robust.
*
* @param integer $index
* @param integer $count
* @param mixed $namespaces Array or false
* @param boolean $usejobs
*
* @return float Value between 0 and 1 indicating the overall progress of the refresh
*/
public function refreshData(&$index, $count, $namespaces = false, $usejobs = true)
{
$updatejobs = array();
$emptyrange = true; // was nothing done in this run?
// Update by MediaWiki page id --> make sure we get all pages.
$tids = array(); // array of ids
for ($i = $index; $i < $index + $count; $i++) {
$tids[] = $i;
}
$titles = Title::newFromIDs($tids);
foreach ($titles as $title) {
if ($namespaces == false || in_array($title->getNamespace(), $namespaces)) {
// wikia change start - jobqueue migration
$task = new \Wikia\Tasks\Tasks\JobWrapperTask();
$task->call('SMWUpdateJob', $title);
$updatejobs[] = $task;
// wikia change end
$emptyrange = false;
}
}
// update by internal SMW id --> make sure we get all objects in SMW
$dbr = wfGetDB(DB_SLAVE, 'smw');
$res = $dbr->select(SMWSql3SmwIds::tableName, array('smw_id', 'smw_title', 'smw_namespace', 'smw_iw', 'smw_subobject'), array("smw_id >= {$index} ", " smw_id < " . $dbr->addQuotes($index + $count)), __METHOD__);
foreach ($res as $row) {
$emptyrange = false; // note this even if no jobs were created
if ($namespaces && !in_array($row->smw_namespace, $namespaces)) {
continue;
}
// Find page to refresh, even for special properties:
if ($row->smw_title != '' && $row->smw_title[0] != '_') {
$titleKey = $row->smw_title;
} elseif ($row->smw_namespace == SMW_NS_PROPERTY && $row->smw_iw == '' && $row->smw_subobject == '') {
$titleKey = str_replace(' ', '_', SMWDIProperty::findPropertyLabel($row->smw_title));
} else {
$titleKey = '';
}
if ($row->smw_subobject !== '') {
// leave subobjects alone; they ought to be changed with their pages
} elseif (($row->smw_iw === '' || $row->smw_iw == SMW_SQL3_SMWREDIIW) && $titleKey != '') {
// objects representing pages
// TODO: special treament of redirects needed, since the store will
// not act on redirects that did not change according to its records
$title = Title::makeTitleSafe($row->smw_namespace, $titleKey);
if ($title !== null && !$title->exists()) {
// wikia change start - jobqueue migration
$task = new \Wikia\Tasks\Tasks\JobWrapperTask();
$task->call('SMWUpdateJob', $title);
$updatejobs[] = $task;
// wikia change end
}
} elseif ($row->smw_iw == SMW_SQL3_SMWIW_OUTDATED) {
// remove outdated internal object references
$dbw = wfGetDB(DB_MASTER, 'smw');
foreach (SMWSQLStore3::getPropertyTables() as $proptable) {
if ($proptable->usesIdSubject()) {
$dbw->delete($proptable->getName(), array('s_id' => $row->smw_id), __METHOD__);
}
}
$dbw->delete(SMWSql3SmwIds::tableName, array('smw_id' => $row->smw_id), __METHOD__);
} elseif ($titleKey != '') {
// "normal" interwiki pages or outdated internal objects -- delete
$diWikiPage = new SMWDIWikiPage($titleKey, $row->smw_namespace, $row->smw_iw);
$emptySemanticData = new SMWSemanticData($diWikiPage);
$this->store->doDataUpdate($emptySemanticData);
}
}
$dbr->freeResult($res);
wfRunHooks('smwRefreshDataJobs', array(&$updatejobs));
if ($usejobs) {
// wikia change start - jobqueue migration
\Wikia\Tasks\Tasks\BaseTask::batch($updatejobs);
// wikia change end
} else {
foreach ($updatejobs as $job) {
// wikia change start - jobqueue migration
/** @var \Wikia\Tasks\Tasks\JobWrapperTask $job */
try {
$job->init();
} catch (Exception $e) {
continue;
}
$job->wrap('SMWUpdateJob');
// wikia change end
}
//......... the rest of this example is omitted .........
Example 10: invalidateIDs
/**
* Invalidate a set of IDs, right now
*/
public function invalidateIDs(ResultWrapper $res)
{
global $wgUseFileCache, $wgUseSquid;
if ($res->numRows() == 0) {
return; // sanity check
}
$dbw = wfGetDB(DB_MASTER);
$timestamp = $dbw->timestamp();
$done = false;
while (!$done) {
# Get all IDs in this query into an array
$ids = array();
for ($i = 0; $i < $this->mRowsPerQuery; $i++) {
$row = $res->fetchRow();
if ($row) {
$ids[] = $row[0];
} else {
$done = true;
break;
}
}
if (count($ids) == 0) {
break;
}
# Update page_touched
$dbw->update('page', array('page_touched' => $timestamp), array('page_id' => $ids), __METHOD__);
# Update static caches
if ($wgUseSquid || $wgUseFileCache) {
$titles = Title::newFromIDs($ids);
# Update squid cache
if ($wgUseSquid) {
$u = SquidUpdate::newFromTitles($titles);
$u->doUpdate();
}
# Update file cache
if ($wgUseFileCache) {
foreach ($titles as $title) {
HTMLFileCache::clearFileCache($title);
}
}
}
}
}
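A hedged caller sketch for the method above: the result set only needs to yield page IDs in its first column. The table, condition, and $updater instance below are illustrative assumptions, not part of the original class:
$dbr = wfGetDB( DB_SLAVE );
$res = $dbr->select(
    'categorylinks',
    'cl_from',                              // first column: page IDs to invalidate
    array( 'cl_to' => 'Example_category' ), // hypothetical category
    __METHOD__
);
$updater->invalidateIDs( $res ); // $updater: an instance of the class defining invalidateIDs()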
Example 11: getCommentsNumber
protected function getCommentsNumber($item)
{
$titles = Title::newFromIDs(array($item['pageid'])); // wrap in an array: newFromIDs() expects an array of page IDs
if (empty($titles)) {
return null;
}
$title = $titles[0];
if (class_exists('ArticleCommentList')) {
$commentsList = ArticleCommentList::newFromTitle($title);
return $commentsList->getCountAllNested();
}
return null;
}
Example 12: latestViewPages
public function latestViewPages($namespace = -1)
{
global $wgStatsDB, $wgStatsDBEnabled, $wgLang;
wfProfileIn(__METHOD__);
$result = array();
if (!empty($wgStatsDBEnabled)) {
$dbr = wfGetDB(DB_SLAVE, array(), $wgStatsDB);
$where = array("pv_city_id" => $this->mCityId, "pv_ts >= '" . date('Y-m-d H-i-s', time() - self::PV_DELTA) . "'");
if ($namespace > 0) {
$where['pv_namespace'] = $namespace;
}
$res = $dbr->select(array('page_views_articles'), array('pv_page_id', 'pv_views'), $where, __METHOD__, array('ORDER BY' => 'pv_ts DESC', 'LIMIT' => self::PV_LIMIT));
$ids = array();
$count = array();
$loop = 0;
while ($oRow = $dbr->fetchObject($res)) {
if (!isset($ids[$oRow->pv_page_id])) {
$ids[$oRow->pv_page_id] = $loop;
$loop++;
}
// initialize the counter on first sight of this page id to avoid an undefined-index notice
if (!isset($count[$oRow->pv_page_id])) {
$count[$oRow->pv_page_id] = 0;
}
$count[$oRow->pv_page_id] += $oRow->pv_views;
}
$dbr->freeResult($res);
$titles = Title::newFromIDs(array_keys($ids));
$urls = array();
foreach ($titles as $oTitle) {
$page_id = $oTitle->getArticleID();
$urls[$page_id] = Xml::element("a", array("href" => $oTitle->getLocalURL()), $oTitle->getFullText());
}
foreach ($ids as $page_id => $position) {
if (isset($urls[$page_id])) {
$result[] = $wgLang->specialList($urls[$page_id], $count[$page_id] . "x");
}
}
}
wfProfileOut(__METHOD__);
#---
return $result;
}
Example 13: refreshData
public function refreshData( &$index, $count, $namespaces = false, $usejobs = true ) {
$updatejobs = array();
$emptyrange = true; // was nothing found in this run?
// update by MediaWiki page id --> make sure we get all pages
$tids = array();
for ( $i = $index; $i < $index + $count; $i++ ) { // array of ids
$tids[] = $i;
}
$titles = Title::newFromIDs( $tids );
foreach ( $titles as $title ) {
if ( ( $namespaces == false ) || ( in_array( $title->getNamespace(), $namespaces ) ) ) {
$updatejobs[] = new SMWUpdateJob( $title );
$emptyrange = false;
}
}
wfRunHooks('smwRefreshDataJobs', array(&$updatejobs));
if ( $usejobs ) {
Job::batchInsert( $updatejobs );
} else {
foreach ( $updatejobs as $job ) {
$job->run();
}
}
$db = wfGetDB( DB_SLAVE );
$nextpos = $index + $count;
if ( $emptyrange ) { // nothing found, check if there will be more pages later on
$nextpos = $db->selectField( 'page', 'page_id', "page_id >= $nextpos", __METHOD__, array( 'ORDER BY' => "page_id ASC" ) );
}
$maxpos = $db->selectField( 'page', 'MAX(page_id)', '', __METHOD__ );
$index = $nextpos ? $nextpos : -1;
return ( $index > 0 ) ? ( $index / $maxpos ) : 1;
}