This article collects typical usage examples of the PHP function wfRecursiveRemoveDir. If you are wondering what wfRecursiveRemoveDir does, how to call it, or want to see it used in real code, the selected examples below should help.
Eleven code examples of wfRecursiveRemoveDir are shown, sorted by popularity by default. Voting for the examples you find useful helps better PHP samples rise to the top.
Example 1: tearDown
protected function tearDown()
{
    global $wgRequest;
    $status = ob_get_status();
    if (isset($status['name']) && $status['name'] === 'MediaWikiTestCase::wfResetOutputBuffersBarrier') {
        ob_end_flush();
    }
    $this->called['tearDown'] = true;
    // Cleaning up temporary files
    foreach ($this->tmpFiles as $fileName) {
        if (is_file($fileName) || is_link($fileName)) {
            unlink($fileName);
        } elseif (is_dir($fileName)) {
            wfRecursiveRemoveDir($fileName);
        }
    }
    if ($this->needsDB() && $this->db) {
        // Clean up open transactions
        while ($this->db->trxLevel() > 0) {
            $this->db->rollback(__METHOD__, 'flush');
        }
    }
    // Restore mw globals
    foreach ($this->mwGlobals as $key => $value) {
        $GLOBALS[$key] = $value;
    }
    $this->mwGlobals = array();
    RequestContext::resetMain();
    MediaHandler::resetCache();
    if (session_id() !== '') {
        session_write_close();
        session_id('');
    }
    $wgRequest = new FauxRequest();
    MediaWiki\Session\SessionManager::resetCache();
    $phpErrorLevel = intval(ini_get('error_reporting'));
    if ($phpErrorLevel !== $this->phpErrorLevel) {
        ini_set('error_reporting', $this->phpErrorLevel);
        $oldHex = strtoupper(dechex($this->phpErrorLevel));
        $newHex = strtoupper(dechex($phpErrorLevel));
        $message = "PHP error_reporting setting was left dirty: " . "was 0x{$oldHex} before test, 0x{$newHex} after test!";
        $this->fail($message);
    }
    parent::tearDown();
}
Example 2: wfRecursiveRemoveDir
/**
 * Remove a directory and all its content.
 * Does not hide error.
 * @param string $dir
 */
function wfRecursiveRemoveDir($dir)
{
    wfDebug(__FUNCTION__ . "( {$dir} )\n");
    // taken from http://de3.php.net/manual/en/function.rmdir.php#98622
    if (is_dir($dir)) {
        $objects = scandir($dir);
        foreach ($objects as $object) {
            if ($object != "." && $object != "..") {
                if (filetype($dir . '/' . $object) == "dir") {
                    wfRecursiveRemoveDir($dir . '/' . $object);
                } else {
                    unlink($dir . '/' . $object);
                }
            }
        }
        reset($objects);
        rmdir($dir);
    }
}
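A minimal usage sketch, following the pattern the tearDown() examples on this page use: guard the call with is_dir(), since wfRecursiveRemoveDir() only acts on directories and does not suppress errors. The cleanupTempDir() wrapper and the scratch path are hypothetical, added here purely for illustration; wfTempDir() and wfRecursiveRemoveDir() are the MediaWiki functions shown elsewhere on this page.

// Hypothetical helper for illustration only -- not part of MediaWiki core.
function cleanupTempDir($path)
{
    if (is_dir($path)) {
        // Recursively delete the whole directory tree.
        wfRecursiveRemoveDir($path);
    } elseif (is_file($path) || is_link($path)) {
        // Fall back to unlink() for plain files and symlinks.
        unlink($path);
    }
}

// Example: remove a scratch directory created under the system temp dir.
cleanupTempDir(wfTempDir() . '/my-extension-scratch');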
Example 3: tearDown
protected function tearDown()
{
    // Cleaning up temporary files
    foreach ($this->tmpfiles as $fname) {
        if (is_file($fname) || is_link($fname)) {
            unlink($fname);
        } elseif (is_dir($fname)) {
            wfRecursiveRemoveDir($fname);
        }
    }
    // Clean up open transactions
    if ($this->needsDB() && $this->db) {
        while ($this->db->trxLevel() > 0) {
            $this->db->rollback();
        }
    }
    // Restore mw globals
    foreach ($this->mwGlobals as $key => $value) {
        $GLOBALS[$key] = $value;
    }
    $this->mwGlobals = array();
    parent::tearDown();
}
Example 4: tearDown
protected function tearDown()
{
    wfProfileIn(__METHOD__);
    // Cleaning up temporary files
    foreach ($this->tmpFiles as $fileName) {
        if (is_file($fileName) || is_link($fileName)) {
            unlink($fileName);
        } elseif (is_dir($fileName)) {
            wfRecursiveRemoveDir($fileName);
        }
    }
    if ($this->needsDB() && $this->db) {
        // Clean up open transactions
        while ($this->db->trxLevel() > 0) {
            $this->db->rollback();
        }
        // don't ignore DB errors
        $this->db->ignoreErrors(false);
    }
    // Restore mw globals
    foreach ($this->mwGlobals as $key => $value) {
        $GLOBALS[$key] = $value;
    }
    $this->mwGlobals = array();
    RequestContext::resetMain();
    MediaHandler::resetCache();
    $phpErrorLevel = intval(ini_get('error_reporting'));
    if ($phpErrorLevel !== $this->phpErrorLevel) {
        ini_set('error_reporting', $this->phpErrorLevel);
        $oldHex = strtoupper(dechex($this->phpErrorLevel));
        $newHex = strtoupper(dechex($phpErrorLevel));
        $message = "PHP error_reporting setting was left dirty: " . "was 0x{$oldHex} before test, 0x{$newHex} after test!";
        $this->fail($message);
    }
    parent::tearDown();
    wfProfileOut(__METHOD__);
}
Example 5: checkpointHelper
/**
 * Ensures that checkpoint dumps are used and written, by successively increasing the
 * stub size and dumping until the duration crosses a threshold.
 *
 * @param string $checkpointFormat Either "file" for plain text or "gzip" for gzipped
 *   checkpoint files.
 */
private function checkpointHelper($checkpointFormat = "file")
{
    // Getting temporary names
    $nameStub = $this->getNewTempFile();
    $nameOutputDir = $this->getNewTempDirectory();
    $stderr = fopen('php://output', 'a');
    if ($stderr === false) {
        $this->fail("Could not open stream for stderr");
    }
    $iterations = 32;
    // We'll start with that many iterations of revisions
    // in stub. Make sure that the generated volume is above the buffer size
    // set below. Otherwise, the checkpointing does not trigger.
    $lastDuration = 0;
    $minDuration = 2;
    // We want the dump to take at least this many seconds
    $checkpointAfter = 0.5;
    // Generate checkpoint after this many seconds
    // Until a dump takes at least $minDuration seconds, perform a dump and check
    // duration. If the dump did not take long enough increase the iteration
    // count, to generate a bigger stub file next time.
    while ($lastDuration < $minDuration) {
        // Setting up the dump
        wfRecursiveRemoveDir($nameOutputDir);
        $this->assertTrue(wfMkdirParents($nameOutputDir), "Creating temporary output directory ");
        $this->setUpStub($nameStub, $iterations);
        $dumper = new TextPassDumper(array("--stub=file:" . $nameStub, "--output=" . $checkpointFormat . ":" . $nameOutputDir . "/full", "--maxtime=1", "--buffersize=32768", "--checkpointfile=checkpoint-%s-%s.xml.gz"));
        $dumper->setDb($this->db);
        $dumper->maxTimeAllowed = $checkpointAfter;
        // Patching maxTime from 1 minute
        $dumper->stderr = $stderr;
        // The actual dump and taking time
        $ts_before = microtime(true);
        $dumper->dump(WikiExporter::FULL, WikiExporter::TEXT);
        $ts_after = microtime(true);
        $lastDuration = $ts_after - $ts_before;
        // Handling increasing the iteration count for the stubs
        if ($lastDuration < $minDuration) {
            $old_iterations = $iterations;
            if ($lastDuration > 0.2) {
                // lastDuration is big enough, to allow an educated guess
                $factor = ($minDuration + 0.5) / $lastDuration;
                if ($factor > 1.1 && $factor < 100) {
                    // educated guess is reasonable
                    $iterations = (int) ($iterations * $factor);
                }
            }
            if ($old_iterations == $iterations) {
                // Heuristics were not applied, so we just *2.
                $iterations *= 2;
            }
            $this->assertLessThan(50000, $iterations, "Emergency stop against infinitely increasing iteration " . "count ( last duration: {$lastDuration} )");
        }
    }
    // The dump (hopefully) did take long enough to produce more than one
    // checkpoint file.
    //
    // We now check all the checkpoint files for validity.
    $files = scandir($nameOutputDir);
    $this->assertTrue(asort($files), "Sorting files in temporary directory");
    $fileOpened = false;
    $lookingForPage = 1;
    $checkpointFiles = 0;
    // Each run of the following loop body tries to handle exactly 1 /page/ (not
    // iteration of stub content). $i is only increased after having treated page 4.
    for ($i = 0; $i < $iterations;) {
        // 1. Assuring a file is opened and ready. Skipping across header if
        // necessary.
        if (!$fileOpened) {
            $this->assertNotEmpty($files, "No more existing dump files, " . "but not yet all pages found");
            $fname = array_shift($files);
            while ($fname == "." || $fname == "..") {
                $this->assertNotEmpty($files, "No more existing dump" . " files, but not yet all pages found");
                $fname = array_shift($files);
            }
            if ($checkpointFormat == "gzip") {
                $this->gunzip($nameOutputDir . "/" . $fname);
            }
            $this->assertDumpStart($nameOutputDir . "/" . $fname);
            $fileOpened = true;
            $checkpointFiles++;
        }
        // 2. Performing a single page check
        switch ($lookingForPage) {
            case 1:
                // Page 1
                $this->assertPageStart($this->pageId1 + $i * self::$numOfPages, NS_MAIN, "BackupDumperTestP1");
                $this->assertRevision($this->revId1_1 + $i * self::$numOfRevs, "BackupDumperTestP1Summary1", $this->textId1_1, false, "0bolhl6ol7i6x0e7yq91gxgaan39j87", "BackupDumperTestP1Text1");
                $this->assertPageEnd();
                $lookingForPage = 2;
                break;
            case 2:
                // Page 2
//......... (the rest of this example has been omitted) .........
Example 6: teardownTestResources
function teardownTestResources($testResourceName)
{
    // remove test database
    $dbw = wfGetDB(DB_MASTER);
    $dbw->query('DROP DATABASE IF EXISTS ' . $testResourceName);
    $testUploadPath = getTestUploadPathFromResourceName($testResourceName);
    // remove test image dir
    if (file_exists($testUploadPath)) {
        wfRecursiveRemoveDir($testUploadPath);
    }
}
Example 7: dynamicPageList
//......... (the beginning of this example has been omitted) .........
    // replace %DPLTIME% by execution time and timestamp in header and footer
    $nowTimeStamp = self::prettyTimeStamp(date('YmdHis'));
    $dplElapsedTime = sprintf('%.3f sec.', microtime(true) - $dplStartTime);
    $header = str_replace('%DPLTIME%', "{$dplElapsedTime} ({$nowTimeStamp})", $header);
    $footer = str_replace('%DPLTIME%', "{$dplElapsedTime} ({$nowTimeStamp})", $footer);
    // replace %LASTTITLE% / %LASTNAMESPACE% by the last title found in header and footer
    if (($n = count($aArticles)) > 0) {
        $firstNamespaceFound = str_replace(' ', '_', $aArticles[0]->mTitle->getNamespace());
        $firstTitleFound = str_replace(' ', '_', $aArticles[0]->mTitle->getText());
        $lastNamespaceFound = str_replace(' ', '_', $aArticles[$n - 1]->mTitle->getNamespace());
        $lastTitleFound = str_replace(' ', '_', $aArticles[$n - 1]->mTitle->getText());
    }
    $header = str_replace('%FIRSTNAMESPACE%', $firstNamespaceFound, $header);
    $footer = str_replace('%FIRSTNAMESPACE%', $firstNamespaceFound, $footer);
    $header = str_replace('%FIRSTTITLE%', $firstTitleFound, $header);
    $footer = str_replace('%FIRSTTITLE%', $firstTitleFound, $footer);
    $header = str_replace('%LASTNAMESPACE%', $lastNamespaceFound, $header);
    $footer = str_replace('%LASTNAMESPACE%', $lastNamespaceFound, $footer);
    $header = str_replace('%LASTTITLE%', $lastTitleFound, $header);
    $footer = str_replace('%LASTTITLE%', $lastTitleFound, $footer);
    $header = str_replace('%SCROLLDIR%', $scrollDir, $header);
    $footer = str_replace('%SCROLLDIR%', $scrollDir, $footer);
    $output .= $header . $dplResult . $footer;
    self::defineScrollVariables($firstNamespaceFound, $firstTitleFound, $lastNamespaceFound, $lastTitleFound, $scrollDir, $iCount, "{$dplElapsedTime} ({$nowTimeStamp})", $rowcount, $dpl->getRowCount());
    // save generated wiki text to dplcache page if desired
    if ($DPLCache != '') {
        // save data in chosen storage
        switch ($DPLCacheStorage) {
            case 'files':
                if (!is_writeable($cacheFile)) {
                    wfMkdirParents(dirname($cacheFile));
                } elseif (($bDPLRefresh || $wgRequest->getVal('action', 'view') == 'submit') && strpos($DPLCache, '/') > 0 && strpos($DPLCache, '..') === false) {
                    // if the cache file contains a path and the user requested a refresh (or saved the file) we delete all brothers
                    wfRecursiveRemoveDir(dirname($cacheFile));
                    wfMkdirParents(dirname($cacheFile));
                }
                $cFile = fopen($cacheFile, 'w');
                fwrite($cFile, $originalInput);
                fwrite($cFile, "+++\n");
                fwrite($cFile, $output);
                fclose($cFile);
                break;
            case 'memcache':
                // create the unique cache key (replace spaces with underscores)
                $cacheKey = self::getMemcacheKey($DPLCache);
                $cacheData = array('timestamp' => time(), 'input' => $originalInput, 'output' => $output);
                $wgMemc->set($cacheKey, $cacheData, $iDPLCachePeriod);
                break;
        }
        $cacheTimeStamp = self::prettyTimeStamp(date('YmdHis'));
        $dplElapsedTime = time() - $dplStartTime;
        if ($logger->iDebugLevel >= 2) {
            $output .= "{{Extension DPL cache|mode=update|page={{FULLPAGENAME}}|cache={$DPLCache}|date={$cacheTimeStamp}|age=0|now=" . date('H:i:s') . "|dpltime={$dplElapsedTime}|offset={$iOffset}}}";
        }
        /** Wikia change begin - never disable parser cache (CE-1066) **/
        // $parser->disableCache();
        /** Wikia change end **/
    }
    // update dependencies to CacheAPI if DPL is to respect the MW ParserCache and the page containing the DPL query is changed
    if (ExtDynamicPageList::$useCacheAPI && $bAllowCachedResults && $wgRequest->getVal('action', 'view') == 'submit') {
        /*
        CacheAPI::remDependencies( $parser->mTitle->getArticleID());
        // add category dependencies
        $conditionTypes = array ( CACHETYPE_CATEGORY );
Example 8: tearDown
protected function tearDown()
{
    // Cleaning up temporary files
    foreach ($this->tmpfiles as $fname) {
        if (is_file($fname) || is_link($fname)) {
            unlink($fname);
        } elseif (is_dir($fname)) {
            wfRecursiveRemoveDir($fname);
        }
    }
    // clean up open transactions
    if ($this->needsDB() && $this->db) {
        while ($this->db->trxLevel() > 0) {
            $this->db->rollback();
        }
    }
    parent::tearDown();
}
Example 9: tearDownFiles
private function tearDownFiles()
{
    foreach ($this->createdDirs as $dir) {
        wfRecursiveRemoveDir($dir);
    }
}
Example 10: tearDown
protected function tearDown()
{
    global $wgRequest, $wgSQLMode;
    $status = ob_get_status();
    if (isset($status['name']) && $status['name'] === 'MediaWikiTestCase::wfResetOutputBuffersBarrier') {
        ob_end_flush();
    }
    $this->called['tearDown'] = true;
    // Cleaning up temporary files
    foreach ($this->tmpFiles as $fileName) {
        if (is_file($fileName) || is_link($fileName)) {
            unlink($fileName);
        } elseif (is_dir($fileName)) {
            wfRecursiveRemoveDir($fileName);
        }
    }
    if ($this->needsDB() && $this->db) {
        // Clean up open transactions
        while ($this->db->trxLevel() > 0) {
            $this->db->rollback(__METHOD__, 'flush');
        }
        if ($this->db->getType() === 'mysql') {
            $this->db->query("SET sql_mode = " . $this->db->addQuotes($wgSQLMode));
        }
    }
    // Restore mw globals
    foreach ($this->mwGlobals as $key => $value) {
        $GLOBALS[$key] = $value;
    }
    $this->mwGlobals = [];
    $this->restoreLoggers();
    if (self::$serviceLocator && MediaWikiServices::getInstance() !== self::$serviceLocator) {
        MediaWikiServices::forceGlobalInstance(self::$serviceLocator);
    }
    // TODO: move global state into MediaWikiServices
    RequestContext::resetMain();
    if (session_id() !== '') {
        session_write_close();
        session_id('');
    }
    $wgRequest = new FauxRequest();
    MediaWiki\Session\SessionManager::resetCache();
    MediaWiki\Auth\AuthManager::resetCache();
    $phpErrorLevel = intval(ini_get('error_reporting'));
    if ($phpErrorLevel !== $this->phpErrorLevel) {
        ini_set('error_reporting', $this->phpErrorLevel);
        $oldHex = strtoupper(dechex($this->phpErrorLevel));
        $newHex = strtoupper(dechex($phpErrorLevel));
        $message = "PHP error_reporting setting was left dirty: " . "was 0x{$oldHex} before test, 0x{$newHex} after test!";
        $this->fail($message);
    }
    parent::tearDown();
}
Example 11: execute
public function execute()
{
    global $wgCaptchaSecret, $wgCaptchaDirectoryLevels;
    $instance = ConfirmEditHooks::getInstance();
    if (!$instance instanceof FancyCaptcha) {
        $this->error("\$wgCaptchaClass is not FancyCaptcha.\n", 1);
    }
    $backend = $instance->getBackend();
    $countAct = $instance->estimateCaptchaCount();
    $this->output("Estimated number of captchas is {$countAct}.\n");
    $countGen = (int) $this->getOption('fill') - $countAct;
    if ($countGen <= 0) {
        $this->output("No need to generate anymore captchas.\n");
        return;
    }
    $tmpDir = wfTempDir() . '/mw-fancycaptcha-' . time() . '-' . wfRandomString(6);
    if (!wfMkdirParents($tmpDir)) {
        $this->error("Could not create temp directory.\n", 1);
    }
    $e = null;
    // exception
    try {
        $cmd = sprintf("python %s --key %s --output %s --count %s --dirs %s", wfEscapeShellArg(__DIR__ . '/../captcha.py'), wfEscapeShellArg($wgCaptchaSecret), wfEscapeShellArg($tmpDir), wfEscapeShellArg($countGen), wfEscapeShellArg($wgCaptchaDirectoryLevels));
        foreach (array('wordlist', 'font', 'font-size', 'blacklist', 'verbose') as $par) {
            if ($this->hasOption($par)) {
                $cmd .= " --{$par} " . wfEscapeShellArg($this->getOption($par));
            }
        }
        $this->output("Generating {$countGen} new captchas...\n");
        $retVal = 1;
        wfShellExec($cmd, $retVal, array(), array('time' => 0));
        if ($retVal != 0) {
            wfRecursiveRemoveDir($tmpDir);
            $this->error("Could not run generation script.\n", 1);
        }
        $flags = FilesystemIterator::SKIP_DOTS;
        $iter = new RecursiveIteratorIterator(new RecursiveDirectoryIterator($tmpDir, $flags), RecursiveIteratorIterator::CHILD_FIRST);
        $this->output("Copying the new captchas to storage...\n");
        foreach ($iter as $fileInfo) {
            if (!$fileInfo->isFile()) {
                continue;
            }
            list($salt, $hash) = $instance->hashFromImageName($fileInfo->getBasename());
            $dest = $instance->imagePath($salt, $hash);
            $backend->prepare(array('dir' => dirname($dest)));
            $status = $backend->quickStore(array('src' => $fileInfo->getPathname(), 'dst' => $dest));
            if (!$status->isOK()) {
                $this->error("Could not save file '{$fileInfo->getPathname()}'.\n");
            }
        }
    } catch (Exception $e) {
        wfRecursiveRemoveDir($tmpDir);
        throw $e;
    }
    $this->output("Removing temporary files...\n");
    wfRecursiveRemoveDir($tmpDir);
    $this->output("Done.\n");
}