本文整理汇总了PHP中BackWPup_Job::log方法的典型用法代码示例。如果您正苦于以下问题:PHP BackWPup_Job::log方法的具体用法?PHP BackWPup_Job::log怎么用?PHP BackWPup_Job::log使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类BackWPup_Job
的用法示例。
在下文中一共展示了BackWPup_Job::log方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的PHP代码示例。
示例1: job_run
/**
 * Dumps all widget-related options (every `widget_*` option plus the
 * `sidebars_widgets` assignment) from the WordPress options table into an
 * SQL file and adds that file to the job's backup file list.
 *
 * @param BackWPup_Job $job_object The running backup job.
 *
 * @return bool TRUE on success, FALSE if the target file could not be written.
 */
public function job_run(BackWPup_Job $job_object)
{
    global $wpdb;

    $job_object->substeps_todo = 1;
    $job_object->log(sprintf(__('%d. Trying to generate a file with installed widget names …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']));

    // Build the destination file name only once per job run.
    if (empty($job_object->temp['widgetlistfile'])) {
        $job_object->temp['widgetlistfile'] = $job_object->generate_filename($job_object->job['widgetlistfile'], 'sql') . $job_object->job['widgetlistfilecompression'];
    }

    $handle = fopen($job_object->temp['widgetlistfile'], 'w');
    if (!$handle) {
        $job_object->log(__('Can not open target file for writing.', 'backwpup'), E_USER_ERROR);
        return FALSE;
    }

    // Export widget instance options and the sidebar assignment in one pass.
    $queries = array(
        "SELECT * FROM {$wpdb->options} WHERE option_name LIKE 'widget_%'",
        "SELECT * FROM {$wpdb->options} WHERE option_name = 'sidebars_widgets'",
    );
    $header = '';
    foreach ($queries as $query) {
        $rows = $wpdb->get_results($query);
        foreach ($rows as $row) {
            // Bug fix: a space was missing between the closing parenthesis and
            // "ON DUPLICATE KEY UPDATE" in the generated SQL.
            $header .= "INSERT INTO {$wpdb->options} (option_name, option_value, autoload) VALUES" . "('" . esc_sql($row->option_name) . "', '" . esc_sql($row->option_value) . "', '" . esc_sql($row->autoload) . "')" . " ON DUPLICATE KEY UPDATE option_value = '" . esc_sql($row->option_value) . "';\n";
        }
    }

    // Robustness: report a failed write instead of silently producing an
    // empty or partial dump file.
    if (fwrite($handle, $header) === FALSE) {
        fclose($handle);
        $job_object->log(__('Can not open target file for writing.', 'backwpup'), E_USER_ERROR);
        return FALSE;
    }
    fclose($handle);

    if ($job_object->temp['widgetlistfile']) {
        $job_object->additional_files_to_backup[] = $job_object->temp['widgetlistfile'];
        // Bug fix: size_format() was called on the file *name* instead of the
        // file size, and the precision argument (2) was passed to sprintf()
        // instead of size_format().
        $job_object->log(sprintf(__('Added widget list file "%1$s" with %2$s to backup file list.', 'backwpup'), $job_object->temp['widgetlistfile'], size_format(filesize($job_object->temp['widgetlistfile']), 2)));
    }

    $job_object->substeps_done = 1;
    return TRUE;
}
示例2: job_run_archive
/**
 * Transfers the finished backup archive to the configured Dropbox folder,
 * stores a download URL for the uploaded file, builds the remote file list
 * and deletes backups exceeding the configured maximum.
 *
 * NOTE: this example listing is truncated at the end ("part of the code is
 * omitted here"), so the closing part of the method is not visible.
 *
 * @param BackWPup_Job $job_object The running backup job.
 *
 * @return bool FALSE on error; TRUE presumably in the omitted tail — TODO confirm.
 */
public function job_run_archive(BackWPup_Job $job_object)
{
$job_object->substeps_todo = 2 + $job_object->backup_filesize;
// Presumably logs the attempt only when this step try has not run before
// (saved try differs from current try) — TODO confirm against BackWPup_Job.
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
$job_object->log(sprintf(__('%d. Try to send backup file to Dropbox …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']));
}
try {
$dropbox = new BackWPup_Destination_Dropbox_API($job_object->job['dropboxroot']);
// Migrate a stored legacy OAuth1 token/secret pair to an OAuth2 access
// token, persist the new token and drop the old secret.
if (!empty($job_object->job['dropboxsecret']) && empty($job_object->job['dropboxtoken']['access_token'])) {
$dropbox->setOAuthTokens(array('access_token' => $job_object->job['dropboxtoken'], 'oauth_token_secret' => BackWPup_Encryption::decrypt($job_object->job['dropboxsecret'])));
$job_object->job['dropboxtoken'] = $dropbox->token_from_oauth1();
BackWPup_Option::update($job_object->job['jobid'], 'dropboxtoken', $job_object->job['dropboxtoken']);
BackWPup_Option::delete($job_object->job['jobid'], 'dropboxsecret');
}
// Set the (possibly migrated) tokens for all following API calls.
$dropbox->setOAuthTokens($job_object->job['dropboxtoken']);
// Fetch and log account information (and, in debug mode, the free quota)
// only on the first pass of this step try.
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
$info = $dropbox->accountInfo();
if (!empty($info['uid'])) {
if ($job_object->is_debug()) {
$user = $info['display_name'] . ' (' . $info['email'] . ')';
} else {
$user = $info['display_name'];
}
$job_object->log(sprintf(__('Authenticated with Dropbox of user: %s', 'backwpup'), $user));
// Free space = quota minus shared and normal usage.
if ($job_object->is_debug()) {
$dropboxfreespase = $info['quota_info']['quota'] - $info['quota_info']['shared'] - $info['quota_info']['normal'];
$job_object->log(sprintf(__('%s available on your Dropbox', 'backwpup'), size_format($dropboxfreespase, 2)));
}
} else {
$job_object->log(__('Not Authenticated with Dropbox!', 'backwpup'), E_USER_ERROR);
return false;
}
$job_object->log(__('Uploading to Dropbox …', 'backwpup'));
}
// Upload the archive (static reference presumably used by an upload
// progress callback — TODO confirm).
self::$backwpup_job_object =& $job_object;
if ($job_object->substeps_done < $job_object->backup_filesize) {
// Only upload if the transfer has not already completed.
$response = $dropbox->upload($job_object->backup_folder . $job_object->backup_file, $job_object->job['dropboxdir'] . $job_object->backup_file);
if ($response['bytes'] == $job_object->backup_filesize) {
if (!empty($job_object->job['jobid'])) {
BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloaddropbox&file=' . ltrim($response['path'], '/') . '&jobid=' . $job_object->job['jobid']);
}
$job_object->substeps_done = 1 + $job_object->backup_filesize;
$job_object->log(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://content.dropboxapi.com/1/files/' . $job_object->job['dropboxroot'] . $response['path']), E_USER_NOTICE);
} else {
// NOTE(review): this inner check is always true here — the outer else
// already implies $response['bytes'] != backup_filesize, so the branch
// logging $response['error'] below is unreachable.
if ($response['bytes'] != $job_object->backup_filesize) {
$job_object->log(__('Uploaded file size and local file size don\'t match.', 'backwpup'), E_USER_ERROR);
} else {
$job_object->log(sprintf(__('Error transfering backup to %s.', 'backwpup') . ' ' . $response['error'], __('Dropbox', 'backwpup')), E_USER_ERROR);
}
return false;
}
}
// Build the remote file list; backup archives are collected separately,
// keyed by modification time, for the pruning step below.
$backupfilelist = array();
$filecounter = 0;
$files = array();
$metadata = $dropbox->metadata($job_object->job['dropboxdir']);
if (is_array($metadata)) {
foreach ($metadata['contents'] as $data) {
if ($data['is_dir'] != true) {
$file = basename($data['path']);
if ($job_object->is_backup_archive($file)) {
$backupfilelist[strtotime($data['modified'])] = $file;
}
$files[$filecounter]['folder'] = "https://content.dropboxapi.com/1/files/" . $job_object->job['dropboxroot'] . dirname($data['path']) . "/";
$files[$filecounter]['file'] = $data['path'];
$files[$filecounter]['filename'] = basename($data['path']);
$files[$filecounter]['downloadurl'] = network_admin_url('admin.php?page=backwpupbackups&action=downloaddropbox&file=' . $data['path'] . '&jobid=' . $job_object->job['jobid']);
$files[$filecounter]['filesize'] = $data['bytes'];
$files[$filecounter]['time'] = strtotime($data['modified']) + get_option('gmt_offset') * 3600;
$filecounter++;
}
}
}
if ($job_object->job['dropboxmaxbackups'] > 0 && is_object($dropbox)) {
// Delete the oldest backups until only the configured maximum remains.
if (count($backupfilelist) > $job_object->job['dropboxmaxbackups']) {
ksort($backupfilelist);
$numdeltefiles = 0;
while ($file = array_shift($backupfilelist)) {
if (count($backupfilelist) < $job_object->job['dropboxmaxbackups']) {
break;
}
$response = $dropbox->fileopsDelete($job_object->job['dropboxdir'] . $file);
// Remove successfully deleted remote files from the cached list too.
if ($response['is_deleted'] == 'true') {
foreach ($files as $key => $filedata) {
if ($filedata['file'] == '/' . $job_object->job['dropboxdir'] . $file) {
unset($files[$key]);
}
//......... part of the code is omitted here .........
示例3: job_run_archive
/**
 * Sends the finished backup archive to SugarSync.
 *
 * Verifies the account and the available quota, creates and enters the
 * destination folder, uploads the archive, deletes backups exceeding the
 * configured maximum and caches the remote file list in a site transient.
 *
 * @param BackWPup_Job $job_object The running backup job.
 *
 * @return bool TRUE when the step is finished (also when there is not enough
 *              free space, so the job does not retry forever), FALSE on error.
 */
public function job_run_archive(BackWPup_Job $job_object)
{
    $job_object->substeps_todo = 2 + $job_object->backup_filesize;
    $job_object->log(sprintf(__('%d. Try to send backup to SugarSync …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
    try {
        $sugarsync = new BackWPup_Destination_SugarSync_API($job_object->job['sugarrefreshtoken']);
        // Check the account and the remaining quota.
        $user = $sugarsync->user();
        if (!empty($user->nickname)) {
            $job_object->log(sprintf(__('Authenticated to SugarSync with nickname %s', 'backwpup'), $user->nickname), E_USER_NOTICE);
        }
        // Cast to float: quota values can exceed the integer range, which made
        // the free-space display/check misbehave (original in-code note).
        $sugarsync_free_space = (double) $user->quota->limit - (double) $user->quota->usage;
        if ($job_object->backup_filesize > $sugarsync_free_space) {
            $job_object->log(sprintf(_x('Not enough disk space available on SugarSync. Available: %s.', 'Available space on SugarSync', 'backwpup'), size_format($sugarsync_free_space, 2)), E_USER_ERROR);
            $job_object->substeps_todo = 1 + $job_object->backup_filesize;
            return TRUE;
        }
        $job_object->log(sprintf(__('%s available at SugarSync', 'backwpup'), size_format($sugarsync_free_space, 2)), E_USER_NOTICE);
        // Create the destination folder if needed and change into it.
        $sugarsync->mkdir($job_object->job['sugardir'], $job_object->job['sugarroot']);
        $dirid = $sugarsync->chdir($job_object->job['sugardir'], $job_object->job['sugarroot']);
        // Upload the archive; the static reference is used by the API class
        // during the transfer.
        $job_object->substeps_done = 0;
        $job_object->log(__('Starting upload to SugarSync …', 'backwpup'), E_USER_NOTICE);
        self::$backwpup_job_object =& $job_object;
        $response = $sugarsync->upload($job_object->backup_folder . $job_object->backup_file);
        if (is_object($response)) {
            if (!empty($job_object->job['jobid'])) {
                BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadsugarsync&file=' . (string) $response . '&jobid=' . $job_object->job['jobid']);
            }
            $job_object->substeps_done++;
            $job_object->log(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://' . $user->nickname . '.sugarsync.com/' . $sugarsync->showdir($dirid) . $job_object->backup_file), E_USER_NOTICE);
        } else {
            $job_object->log(__('Cannot transfer backup to SugarSync!', 'backwpup'), E_USER_ERROR);
            return FALSE;
        }
        // Build the remote file list; backup archives are collected separately,
        // keyed by modification time, for the pruning step below.
        $backupfilelist = array();
        $files = array();
        $filecounter = 0;
        $dir = $sugarsync->showdir($dirid);
        $getfiles = $sugarsync->getcontents('file');
        if (is_object($getfiles)) {
            foreach ($getfiles->file as $getfile) {
                $getfile->displayName = utf8_decode((string) $getfile->displayName);
                if ($job_object->is_backup_archive($getfile->displayName)) {
                    $backupfilelist[strtotime((string) $getfile->lastModified)] = (string) $getfile->ref;
                }
                $files[$filecounter]['folder'] = 'https://' . (string) $user->nickname . '.sugarsync.com/' . $dir;
                $files[$filecounter]['file'] = (string) $getfile->ref;
                $files[$filecounter]['filename'] = (string) $getfile->displayName;
                $files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadsugarsync&file=' . (string) $getfile->ref . '&jobid=' . $job_object->job['jobid'];
                $files[$filecounter]['filesize'] = (int) $getfile->size;
                $files[$filecounter]['time'] = strtotime((string) $getfile->lastModified) + get_option('gmt_offset') * 3600;
                $filecounter++;
            }
        }
        if (!empty($job_object->job['sugarmaxbackups']) && $job_object->job['sugarmaxbackups'] > 0) {
            // Delete the oldest backups until only the configured maximum remains.
            if (count($backupfilelist) > $job_object->job['sugarmaxbackups']) {
                ksort($backupfilelist);
                $deleted_files = 0;
                while ($file = array_shift($backupfilelist)) {
                    if (count($backupfilelist) < $job_object->job['sugarmaxbackups']) {
                        break;
                    }
                    $sugarsync->delete($file);
                    // Keep the cached file list in sync with the deletion.
                    foreach ($files as $key => $filedata) {
                        if ($filedata['file'] == $file) {
                            unset($files[$key]);
                        }
                    }
                    $deleted_files++;
                }
                if ($deleted_files > 0) {
                    $job_object->log(sprintf(_n('One file deleted on SugarSync folder', '%d files deleted on SugarSync folder', $deleted_files, 'backwpup'), $deleted_files), E_USER_NOTICE);
                }
            }
        }
        set_site_transient('BackWPup_' . $job_object->job['jobid'] . '_SUGARSYNC', $files, 60 * 60 * 24 * 7);
    } catch (Exception $e) {
        // Bug fix: the message and the error level were passed in swapped
        // order; every other log() call site in this file passes the message
        // first, then the level, file and line.
        $job_object->log(sprintf(__('SugarSync API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR, $e->getFile(), $e->getLine());
        return FALSE;
    }
    $job_object->substeps_done++;
    return TRUE;
}
示例4: job_run
/**
 * Creates a WordPress eXtended RSS (WXR) export of the site's content as an
 * XML file, processed in restartable substeps ('header', 'authors', 'cats', …).
 *
 * NOTE: this example listing is truncated at the end ("part of the code is
 * omitted here"), so the remaining substeps are not visible.
 *
 * @param BackWPup_Job $job_object The running backup job.
 *
 * @return bool FALSE on error; presumably TRUE when all substeps finish — TODO confirm.
 */
public function job_run(BackWPup_Job $job_object)
{
global $wpdb, $post, $wp_query;
$wxr_version = '1.2';
// Initialize step state (target file, first substep, post-ID snapshot and
// progress counters) only on the first pass of this step try.
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
$job_object->log(sprintf(__('%d. Trying to create a WordPress export to XML file …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']));
$job_object->steps_data[$job_object->step_working]['wpexportfile'] = BackWPup::get_plugin_data('TEMP') . $job_object->generate_filename($job_object->job['wpexportfile'], 'xml', TRUE);
$job_object->steps_data[$job_object->step_working]['substep'] = 'header';
$job_object->steps_data[$job_object->step_working]['post_ids'] = array();
$job_object->substeps_todo = 10;
$job_object->substeps_done = 0;
}
add_filter('wxr_export_skip_postmeta', array($this, 'wxr_filter_postmeta'), 10, 2);
// Substep 1: build the WHERE clause, snapshot post IDs and write the XML
// header / channel preamble.
if ($job_object->steps_data[$job_object->step_working]['substep'] == 'header') {
if ('all' != $job_object->job['wpexportcontent'] && post_type_exists($job_object->job['wpexportcontent'])) {
$ptype = get_post_type_object($job_object->job['wpexportcontent']);
if (!$ptype->can_export) {
$job_object->log(sprintf(__('WP Export: Post type “%s” does not allow export.', 'backwpup'), $job_object->job['wpexportcontent']), E_USER_ERROR);
return FALSE;
}
$where = $wpdb->prepare("{$wpdb->posts}.post_type = %s", $job_object->job['wpexportcontent']);
} else {
// Export every exportable post type when 'all' (or an unknown type) is
// configured.
$post_types = get_post_types(array('can_export' => true));
$esses = array_fill(0, count($post_types), '%s');
$where = $wpdb->prepare("{$wpdb->posts}.post_type IN (" . implode(',', $esses) . ')', $post_types);
$job_object->job['wpexportcontent'] = 'all';
}
$where .= " AND {$wpdb->posts}.post_status != 'auto-draft'";
// grab a snapshot of post IDs, just in case it changes during the export
$job_object->steps_data[$job_object->step_working]['post_ids'] = $wpdb->get_col("SELECT ID FROM {$wpdb->posts} WHERE {$where}");
$job_object->substeps_todo = $job_object->substeps_todo + count($job_object->steps_data[$job_object->step_working]['post_ids']);
$header = '<?xml version="1.0" encoding="' . get_bloginfo('charset') . "\" ?>\n";
$header .= "<!-- This is a WordPress eXtended RSS file generated by the WordPress plugin BackWPup as an export of your site. -->\n";
$header .= "<!-- It contains information about your site's posts, pages, comments, categories, and other content. -->\n";
$header .= "<!-- You may use this file to transfer that content from one site to another. -->\n";
$header .= "<!-- This file is not intended to serve as a complete backup of your site. -->\n\n";
$header .= "<!-- To import this information into a WordPress site follow these steps: -->\n";
$header .= "<!-- 1. Log in to that site as an administrator. -->\n";
$header .= "<!-- 2. Go to Tools: Import in the WordPress admin panel. -->\n";
$header .= "<!-- 3. Install the \"WordPress\" importer from the list. -->\n";
$header .= "<!-- 4. Activate & Run Importer. -->\n";
$header .= "<!-- 5. Upload this file using the form provided on that page. -->\n";
$header .= "<!-- 6. You will first be asked to map the authors in this export file to users -->\n";
$header .= "<!-- on the site. For each author, you may choose to map to an -->\n";
$header .= "<!-- existing user on the site or to create a new user. -->\n";
$header .= "<!-- 7. WordPress will then import each of the posts, pages, comments, categories, etc. -->\n";
$header .= "<!-- contained in this file into your site. -->\n\n";
$header .= "<!-- generator=\"WordPress/" . get_bloginfo_rss('version') . "\" created=\"" . date('Y-m-d H:i') . "\" -->\n";
$header .= "<rss version=\"2.0\" xmlns:excerpt=\"http://wordpress.org/export/{$wxr_version}/excerpt/\" xmlns:content=\"http://purl.org/rss/1.0/modules/content/\" xmlns:wfw=\"http://wellformedweb.org/CommentAPI/\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:wp=\"http://wordpress.org/export/{$wxr_version}/\">\n";
$header .= "<channel>\n";
$header .= "\t<title>" . get_bloginfo_rss('name') . "</title>\n";
$header .= "\t<link>" . get_bloginfo_rss('url') . "</link>\n";
$header .= "\t<description>" . get_bloginfo_rss('description') . "</description>\n";
$header .= "\t<pubDate>" . date('D, d M Y H:i:s +0000') . "</pubDate>\n";
$header .= "\t<language>" . get_bloginfo_rss('language') . "</language>\n";
$header .= "\t<wp:wxr_version>" . $wxr_version . "</wp:wxr_version>\n";
$header .= "\t<wp:base_site_url>" . $this->wxr_site_url() . "</wp:base_site_url>\n";
$header .= "\t<wp:base_blog_url>" . get_bloginfo_rss('url') . "</wp:base_blog_url>\n";
$written = file_put_contents($job_object->steps_data[$job_object->step_working]['wpexportfile'], $header, FILE_APPEND);
if ($written === FALSE) {
$job_object->log(__('WP Export file could not written.', 'backwpup'), E_USER_ERROR);
return FALSE;
}
unset($header);
// Advance to the next substep and allow the job to restart between substeps.
$job_object->steps_data[$job_object->step_working]['substep'] = 'authors';
$job_object->substeps_done++;
$job_object->update_working_data();
$job_object->do_restart_time();
}
// Substep 2: append the author list.
if ($job_object->steps_data[$job_object->step_working]['substep'] == 'authors') {
$written = file_put_contents($job_object->steps_data[$job_object->step_working]['wpexportfile'], $this->wxr_authors_list(), FILE_APPEND);
if ($written === FALSE) {
$job_object->log(__('WP Export file could not written.', 'backwpup'), E_USER_ERROR);
return FALSE;
}
$job_object->steps_data[$job_object->step_working]['substep'] = 'cats';
$job_object->substeps_done++;
$job_object->update_working_data();
$job_object->do_restart_time();
}
// Substep 3: append categories (only for a full export).
if ($job_object->steps_data[$job_object->step_working]['substep'] == 'cats') {
if ('all' == $job_object->job['wpexportcontent']) {
$cats = array();
$categories = (array) get_categories(array('get' => 'all'));
// put categories in order with no child going before its parent
while ($cat = array_shift($categories)) {
if ($cat->parent == 0 || isset($cats[$cat->parent])) {
$cats[$cat->term_id] = $cat;
} else {
// Parent not seen yet: push the child back to the end of the queue.
$categories[] = $cat;
}
}
$cats_xml = '';
foreach ($cats as $c) {
$parent_slug = $c->parent ? $cats[$c->parent]->slug : '';
$cats_xml .= "\t<wp:category><wp:term_id>" . $c->term_id . "</wp:term_id><wp:category_nicename>" . $c->slug . "</wp:category_nicename><wp:category_parent>" . $parent_slug . "</wp:category_parent>" . $this->wxr_cat_name($c) . $this->wxr_category_description($c) . "</wp:category>\n";
//......... part of the code is omitted here .........
示例5: job_run_archive
/**
 * Transfers the finished backup archive to an FTP server (explicit SSL-FTP
 * when configured and available, plain FTP otherwise), creating the remote
 * directory path if necessary.
 *
 * NOTE: this example listing is truncated at the end ("part of the code is
 * omitted here"), so the actual upload and cleanup are not visible.
 *
 * @param BackWPup_Job $job_object The running backup job.
 *
 * @return bool FALSE on connection/login/directory errors; TRUE when SSL-FTP
 *              support is missing (the step is skipped rather than retried).
 */
public function job_run_archive(BackWPup_Job $job_object)
{
$job_object->substeps_todo = 2 + $job_object->backup_filesize;
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
$job_object->log(sprintf(__('%d. Try to send backup file to an FTP server …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
}
if (!empty($job_object->job['ftpssl'])) {
// Make an explicit SSL-FTP connection when the job requests it.
if (function_exists('ftp_ssl_connect')) {
$ftp_conn_id = ftp_ssl_connect($job_object->job['ftphost'], $job_object->job['ftphostport'], $job_object->job['ftptimeout']);
if ($ftp_conn_id) {
$job_object->log(sprintf(__('Connected via explicit SSL-FTP to server: %s', 'backwpup'), $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport']), E_USER_NOTICE);
} else {
$job_object->log(sprintf(__('Cannot connect via explicit SSL-FTP to server: %s', 'backwpup'), $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport']), E_USER_ERROR);
return FALSE;
}
} else {
// PHP build has no SSL-FTP support: log an error but return TRUE,
// presumably so the job finishes this step instead of retrying forever
// — TODO confirm intent.
$job_object->log(__('PHP function to connect with explicit SSL-FTP to server does not exist!', 'backwpup'), E_USER_ERROR);
return TRUE;
}
} else {
// Plain FTP connection when SSL is not requested.
$ftp_conn_id = ftp_connect($job_object->job['ftphost'], $job_object->job['ftphostport'], $job_object->job['ftptimeout']);
if ($ftp_conn_id) {
$job_object->log(sprintf(__('Connected to FTP server: %s', 'backwpup'), $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport']), E_USER_NOTICE);
} else {
$job_object->log(sprintf(__('Cannot connect to FTP server: %s', 'backwpup'), $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport']), E_USER_ERROR);
return FALSE;
}
}
// FTP login: try ftp_login() first, fall back to raw USER/PASS commands.
$job_object->log(sprintf(__('FTP client command: %s', 'backwpup'), 'USER ' . $job_object->job['ftpuser']), E_USER_NOTICE);
if ($loginok = @ftp_login($ftp_conn_id, $job_object->job['ftpuser'], BackWPup_Encryption::decrypt($job_object->job['ftppass']))) {
$job_object->log(sprintf(__('FTP server response: %s', 'backwpup'), 'User ' . $job_object->job['ftpuser'] . ' logged in.'), E_USER_NOTICE);
} else {
// ftp_login() failed: issue USER/PASS manually and inspect the 3-digit
// reply codes (<= 400 is treated as success here).
$return = ftp_raw($ftp_conn_id, 'USER ' . $job_object->job['ftpuser']);
$job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), $return[0]), E_USER_NOTICE);
if (substr(trim($return[0]), 0, 3) <= 400) {
$job_object->log(sprintf(__('FTP client command: %s', 'backwpup'), 'PASS *******'), E_USER_NOTICE);
$return = ftp_raw($ftp_conn_id, 'PASS ' . BackWPup_Encryption::decrypt($job_object->job['ftppass']));
if (substr(trim($return[0]), 0, 3) <= 400) {
$job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), $return[0]), E_USER_NOTICE);
$loginok = TRUE;
} else {
$job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), $return[0]), E_USER_ERROR);
}
}
}
if (!$loginok) {
return FALSE;
}
// Query and log the server's system type (SYST).
$job_object->log(sprintf(__('FTP client command: %s', 'backwpup'), 'SYST'), E_USER_NOTICE);
$systype = ftp_systype($ftp_conn_id);
if ($systype) {
$job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), $systype), E_USER_NOTICE);
} else {
$job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), __('Error getting SYSTYPE', 'backwpup')), E_USER_ERROR);
}
// Default the destination directory to the server's current directory.
if (empty($job_object->job['ftpdir'])) {
$job_object->job['ftpdir'] = trailingslashit(ftp_pwd($ftp_conn_id));
}
// Prepend the current directory when a relative path was configured.
if (substr($job_object->job['ftpdir'], 0, 1) != '/') {
$job_object->job['ftpdir'] = trailingslashit(ftp_pwd($ftp_conn_id)) . $job_object->job['ftpdir'];
}
// Walk the destination path from the root, creating each missing segment.
if ($job_object->job['ftpdir'] != '/') {
@ftp_chdir($ftp_conn_id, '/');
//go to root
$ftpdirs = explode('/', trim($job_object->job['ftpdir'], '/'));
foreach ($ftpdirs as $ftpdir) {
if (empty($ftpdir)) {
continue;
}
if (!@ftp_chdir($ftp_conn_id, $ftpdir)) {
if (@ftp_mkdir($ftp_conn_id, $ftpdir)) {
$job_object->log(sprintf(__('FTP Folder "%s" created!', 'backwpup'), $ftpdir), E_USER_NOTICE);
ftp_chdir($ftp_conn_id, $ftpdir);
} else {
$job_object->log(sprintf(__('FTP Folder "%s" cannot be created!', 'backwpup'), $ftpdir), E_USER_ERROR);
return FALSE;
}
}
}
}
// Get the current working directory
$current_ftp_dir = trailingslashit(ftp_pwd($ftp_conn_id));
if ($job_object->substeps_done == 0) {
$job_object->log(sprintf(__('FTP current folder is: %s', 'backwpup'), $current_ftp_dir), E_USER_NOTICE);
}
// Remote file size is used to resume a partial upload.
// NOTE(review): ftp_size() returns -1 when the file does not exist, so
// substeps_done can become -1 here — confirm the omitted code handles that.
@clearstatcache();
$job_object->substeps_done = @ftp_size($ftp_conn_id, $job_object->job['ftpdir'] . $job_object->backup_file);
//......... part of the code is omitted here .........
示例6: job_run
/**
 * Dumps the WordPress database to an SQL file in restartable chunks and adds
 * the dump to the job's backup file list.
 *
 * Tables are dumped one at a time; each table is read in chunks whose size is
 * adapted to the measured dump speed so a chunk fits into the remaining
 * restart time window.
 *
 * NOTE: this example listing is truncated at the end ("part of the code is
 * omitted here").
 *
 * @param BackWPup_Job $job_object The running backup job.
 *
 * @return bool TRUE when done (or when there are no tables to back up),
 *              FALSE on error.
 */
public function job_run(BackWPup_Job $job_object)
{
$job_object->substeps_todo = 1;
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
$job_object->log(sprintf(__('%d. Try to backup database …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']));
}
// Build the dump file name only once per job run.
if (empty($job_object->steps_data[$job_object->step_working]['dbdumpfile'])) {
$job_object->steps_data[$job_object->step_working]['dbdumpfile'] = $job_object->generate_filename($job_object->job['dbdumpfile'], 'sql') . $job_object->job['dbdumpfilecompression'];
}
try {
// Connect to the database; the dumper writes to the temp folder.
$sql_dump = new BackWPup_MySQLDump(array('dumpfile' => BackWPup::get_plugin_data('TEMP') . $job_object->steps_data[$job_object->step_working]['dbdumpfile']));
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
$job_object->log(sprintf(__('Connected to database %1$s on %2$s', 'backwpup'), DB_NAME, DB_HOST));
}
// Remove tables the job configuration excludes from the dump.
foreach ($sql_dump->tables_to_dump as $key => $table) {
if (in_array($table, $job_object->job['dbdumpexclude'], true)) {
unset($sql_dump->tables_to_dump[$key]);
}
}
// One substep per table to dump.
$job_object->substeps_todo = count($sql_dump->tables_to_dump);
if ($job_object->substeps_todo == 0) {
$job_object->log(__('No tables to backup.', 'backwpup'), E_USER_WARNING);
unset($sql_dump);
return TRUE;
}
// Write the dump header only once, even across restarts.
if (!isset($job_object->steps_data[$job_object->step_working]['is_head'])) {
$sql_dump->dump_head(TRUE);
$job_object->steps_data[$job_object->step_working]['is_head'] = TRUE;
}
// Dump the tables, skipping those already completed in a previous pass.
$i = 0;
foreach ($sql_dump->tables_to_dump as $table) {
if ($i < $job_object->substeps_done) {
$i++;
continue;
}
if (empty($job_object->steps_data[$job_object->step_working]['tables'][$table])) {
$num_records = $sql_dump->dump_table_head($table);
$job_object->steps_data[$job_object->step_working]['tables'][$table] = array('start' => 0, 'length' => 1000);
if ($job_object->is_debug()) {
$job_object->log(sprintf(__('Backup database table "%s" with "%s" records', 'backwpup'), $table, $num_records));
}
}
// Dump the table in chunks until a chunk comes back short (last chunk).
$while = true;
while ($while) {
$dump_start_time = microtime(TRUE);
$done_records = $sql_dump->dump_table($table, $job_object->steps_data[$job_object->step_working]['tables'][$table]['start'], $job_object->steps_data[$job_object->step_working]['tables'][$table]['length']);
$dump_time = microtime(TRUE) - $dump_start_time;
// Guard against a zero elapsed time before dividing below.
if (empty($dump_time)) {
$dump_time = 0.01;
}
if ($done_records < $job_object->steps_data[$job_object->step_working]['tables'][$table]['length']) {
// A short chunk means the table is finished.
$while = FALSE;
}
$job_object->steps_data[$job_object->step_working]['tables'][$table]['start'] = $job_object->steps_data[$job_object->step_working]['tables'][$table]['start'] + $done_records;
// Size the next chunk from records-per-second so it fits into the
// remaining restart time window, clamped to [1000, 25000] records.
$length = ceil($done_records / $dump_time * $job_object->get_restart_time());
if ($length > 25000 || 0 >= $job_object->get_restart_time()) {
$length = 25000;
}
if ($length < 1000) {
$length = 1000;
}
$job_object->steps_data[$job_object->step_working]['tables'][$table]['length'] = $length;
$job_object->do_restart_time();
}
$sql_dump->dump_table_footer($table);
$job_object->substeps_done++;
$i++;
$job_object->update_working_data();
}
// Finish the dump file.
$sql_dump->dump_footer();
unset($sql_dump);
} catch (Exception $e) {
$job_object->log($e->getMessage(), E_USER_ERROR, $e->getFile(), $e->getLine());
unset($sql_dump);
return FALSE;
}
// Verify the dump file exists and is non-empty before registering it.
$filesize = filesize(BackWPup::get_plugin_data('TEMP') . $job_object->steps_data[$job_object->step_working]['dbdumpfile']);
if (!is_file(BackWPup::get_plugin_data('TEMP') . $job_object->steps_data[$job_object->step_working]['dbdumpfile']) || $filesize < 1) {
$job_object->log(__('MySQL backup file not created', 'backwpup'), E_USER_ERROR);
return FALSE;
} else {
$job_object->additional_files_to_backup[] = BackWPup::get_plugin_data('TEMP') . $job_object->steps_data[$job_object->step_working]['dbdumpfile'];
$job_object->log(sprintf(__('Added database dump "%1$s" with %2$s to backup file list', 'backwpup'), $job_object->steps_data[$job_object->step_working]['dbdumpfile'], size_format($filesize, 2)));
}
//......... part of the code is omitted here .........
示例7: job_run_archive
/**
 * Transfers the finished backup archive to a Rackspace Cloud Files container,
 * prunes old backups beyond the configured maximum and caches the remote
 * file list in a site transient.
 *
 * NOTE: this example listing is truncated at the end ("part of the code is
 * omitted here").
 *
 * @param BackWPup_Job $job_object The running backup job.
 *
 * @return bool FALSE on error; presumably TRUE in the omitted tail — TODO confirm.
 */
public function job_run_archive(BackWPup_Job $job_object)
{
$job_object->substeps_todo = 2 + $job_object->backup_filesize;
$job_object->substeps_done = 0;
$job_object->log(sprintf(__('%d. Trying to send backup file to Rackspace cloud …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
try {
$conn = new OpenCloud\Rackspace(self::get_auth_url_by_region($job_object->job['rscregion']), array('username' => $job_object->job['rscusername'], 'apiKey' => BackWPup_Encryption::decrypt($job_object->job['rscapikey'])));
// Connect to the Cloud Files object store and open the target container.
$ostore = $conn->objectStoreService('cloudFiles', $job_object->job['rscregion'], 'publicURL');
$container = $ostore->getContainer($job_object->job['rsccontainer']);
$job_object->log(sprintf(__('Connected to Rackspace cloud files container %s', 'backwpup'), $job_object->job['rsccontainer']));
} catch (Exception $e) {
// NOTE(review): message and error level appear swapped — log() is called
// elsewhere in this file as log($message, E_USER_ERROR, $file, $line).
$job_object->log(E_USER_ERROR, sprintf(__('Rackspace Cloud API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
return FALSE;
}
try {
// Upload the backup archive from a read-only stream.
$job_object->substeps_done = 0;
$job_object->log(__('Upload to Rackspace cloud started …', 'backwpup'), E_USER_NOTICE);
if ($handle = fopen($job_object->backup_folder . $job_object->backup_file, 'rb')) {
$uploded = $container->uploadObject($job_object->job['rscdir'] . $job_object->backup_file, $handle);
fclose($handle);
} else {
$job_object->log(__('Can not open source file for transfer.', 'backwpup'), E_USER_ERROR);
return FALSE;
}
// Alternative chunked-transfer implementation kept for reference:
// $transfer = $container->setupObjectTransfer( array(
// 'name' => $job_object->job[ 'rscdir' ] . $job_object->backup_file,
// 'path' => $job_object->backup_folder . $job_object->backup_file,
// 'concurrency' => 1,
// 'partSize' => 4 * 1024 * 1024
// ) );
// $uploded = $transfer->upload();
if ($uploded) {
$job_object->log(__('Backup File transferred to RSC://', 'backwpup') . $job_object->job['rsccontainer'] . '/' . $job_object->job['rscdir'] . $job_object->backup_file, E_USER_NOTICE);
$job_object->substeps_done = 1 + $job_object->backup_filesize;
if (!empty($job_object->job['jobid'])) {
BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadrsc&file=' . $job_object->job['rscdir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
}
} else {
$job_object->log(__('Cannot transfer backup to Rackspace cloud.', 'backwpup'), E_USER_ERROR);
return FALSE;
}
} catch (Exception $e) {
// NOTE(review): message and error level appear swapped here as well.
$job_object->log(E_USER_ERROR, sprintf(__('Rackspace Cloud API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
return FALSE;
}
try {
// Build the remote file list; backup archives directly inside the target
// folder are collected separately, keyed by modification time, for pruning.
$backupfilelist = array();
$filecounter = 0;
$files = array();
$objlist = $container->objectList(array('prefix' => $job_object->job['rscdir']));
while ($object = $objlist->next()) {
$file = basename($object->getName());
if ($job_object->job['rscdir'] . $file == $object->getName()) {
// Only objects directly in the folder, not in the whole container.
if ($job_object->is_backup_archive($file)) {
$backupfilelist[strtotime($object->getLastModified())] = $object;
}
}
$files[$filecounter]['folder'] = "RSC://" . $job_object->job['rsccontainer'] . "/" . dirname($object->getName()) . "/";
$files[$filecounter]['file'] = $object->getName();
$files[$filecounter]['filename'] = basename($object->getName());
$files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadrsc&file=' . $object->getName() . '&jobid=' . $job_object->job['jobid'];
$files[$filecounter]['filesize'] = $object->getContentLength();
$files[$filecounter]['time'] = strtotime($object->getLastModified());
$filecounter++;
}
if (!empty($job_object->job['rscmaxbackups']) && $job_object->job['rscmaxbackups'] > 0) {
// Delete the oldest backups until only the configured maximum remains.
if (count($backupfilelist) > $job_object->job['rscmaxbackups']) {
ksort($backupfilelist);
$numdeltefiles = 0;
while ($file = array_shift($backupfilelist)) {
if (count($backupfilelist) < $job_object->job['rscmaxbackups']) {
break;
}
// Keep the cached file list in sync with the deletion below.
foreach ($files as $key => $filedata) {
if ($filedata['file'] == $file->getName()) {
unset($files[$key]);
}
}
$file->delete();
$numdeltefiles++;
}
if ($numdeltefiles > 0) {
$job_object->log(sprintf(_n('One file deleted on Rackspace cloud container.', '%d files deleted on Rackspace cloud container.', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
}
}
}
set_site_transient('backwpup_' . $job_object->job['jobid'] . '_rsc', $files, 60 * 60 * 24 * 7);
} catch (Exception $e) {
// NOTE(review): message and error level appear swapped here as well.
$job_object->log(E_USER_ERROR, sprintf(__('Rackspace Cloud API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
return FALSE;
}
$job_object->substeps_done++;
//......... part of the code is omitted here .........
示例8: job_run_archive
/**
* @param $job_object BAckWPup_Job
* @return bool
*/
public function job_run_archive(BackWPup_Job $job_object)
{
$job_object->substeps_todo = 2 + $job_object->backup_filesize;
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
$job_object->log(sprintf(__('%d. Trying to send backup file to S3 Service …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
}
try {
$s3 = Aws\S3\S3Client::factory(array('key' => $job_object->job['s3accesskey'], 'secret' => BackWPup_Encryption::decrypt($job_object->job['s3secretkey']), 'region' => $job_object->job['s3region'], 'base_url' => $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']), 'scheme' => 'https', 'ssl.certificate_authority' => BackWPup::get_plugin_data('cacert')));
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY'] && $job_object->substeps_done < $job_object->backup_filesize) {
if ($s3->doesBucketExist($job_object->job['s3bucket'])) {
$bucketregion = $s3->getBucketLocation(array('Bucket' => $job_object->job['s3bucket']));
$job_object->log(sprintf(__('Connected to S3 Bucket "%1$s" in %2$s', 'backwpup'), $job_object->job['s3bucket'], $bucketregion->get('Location')), E_USER_NOTICE);
} else {
$job_object->log(sprintf(__('S3 Bucket "%s" does not exist!', 'backwpup'), $job_object->job['s3bucket']), E_USER_ERROR);
return TRUE;
}
if ($job_object->job['s3multipart'] && empty($job_object->steps_data[$job_object->step_working]['UploadId'])) {
//Check for aboded Multipart Uploads
$job_object->log(__('Checking for not aborted multipart Uploads …', 'backwpup'));
$multipart_uploads = $s3->listMultipartUploads(array('Bucket' => $job_object->job['s3bucket'], 'Prefix' => (string) $job_object->job['s3dir']));
$uploads = $multipart_uploads->get('Uploads');
if (!empty($uploads)) {
foreach ($uploads as $upload) {
$s3->abortMultipartUpload(array('Bucket' => $job_object->job['s3bucket'], 'Key' => $upload['Key'], 'UploadId' => $upload['UploadId']));
$job_object->log(sprintf(__('Upload for %s aborted.', 'backwpup'), $upload['Key']));
}
}
}
//transfer file to S3
$job_object->log(__('Starting upload to S3 Service …', 'backwpup'));
}
if (!$job_object->job['s3multipart'] || $job_object->backup_filesize < 1048576 * 6) {
//Prepare Upload
if (!($up_file_handle = fopen($job_object->backup_folder . $job_object->backup_file, 'rb'))) {
$job_object->log(__('Can not open source file for transfer.', 'backwpup'), E_USER_ERROR);
return FALSE;
}
$create_args = array();
$create_args['Bucket'] = $job_object->job['s3bucket'];
$create_args['ACL'] = 'private';
//encrxption
if (!empty($job_object->job['s3ssencrypt'])) {
$create_args['ServerSideEncryption'] = $job_object->job['s3ssencrypt'];
}
//Storage Class
if (!empty($job_object->job['s3storageclass'])) {
$create_args['StorageClass'] = $job_object->job['s3storageclass'];
}
$create_args['Metadata'] = array('BackupTime' => date('Y-m-d H:i:s', $job_object->start_time));
$create_args['Body'] = $up_file_handle;
$create_args['Key'] = $job_object->job['s3dir'] . $job_object->backup_file;
$create_args['ContentType'] = $job_object->get_mime_type($job_object->backup_folder . $job_object->backup_file);
try {
$s3->putObject($create_args);
} catch (Aws\Common\Exception\MultipartUploadException $e) {
$job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
return FALSE;
}
} else {
//Prepare Upload
if ($file_handle = fopen($job_object->backup_folder . $job_object->backup_file, 'rb')) {
fseek($file_handle, $job_object->substeps_done);
try {
if (empty($job_object->steps_data[$job_object->step_working]['UploadId'])) {
$args = array('ACL' => 'private', 'Bucket' => $job_object->job['s3bucket'], 'ContentType' => $job_object->get_mime_type($job_object->backup_folder . $job_object->backup_file), 'Key' => $job_object->job['s3dir'] . $job_object->backup_file);
if (!empty($job_object->job['s3ssencrypt'])) {
$args['ServerSideEncryption'] = $job_object->job['s3ssencrypt'];
}
if (!empty($job_object->job['s3storageclass'])) {
$args['StorageClass'] = empty($job_object->job['s3storageclass']) ? '' : $job_object->job['s3storageclass'];
}
$upload = $s3->createMultipartUpload($args);
$job_object->steps_data[$job_object->step_working]['UploadId'] = $upload->get('UploadId');
$job_object->steps_data[$job_object->step_working]['Parts'] = array();
$job_object->steps_data[$job_object->step_working]['Part'] = 1;
}
while (!feof($file_handle)) {
$chunk_upload_start = microtime(TRUE);
$part_data = fread($file_handle, 1048576 * 5);
//5MB Minimum part size
$part = $s3->uploadPart(array('Bucket' => $job_object->job['s3bucket'], 'UploadId' => $job_object->steps_data[$job_object->step_working]['UploadId'], 'Key' => $job_object->job['s3dir'] . $job_object->backup_file, 'PartNumber' => $job_object->steps_data[$job_object->step_working]['Part'], 'Body' => $part_data));
$chunk_upload_time = microtime(TRUE) - $chunk_upload_start;
$job_object->substeps_done = $job_object->substeps_done + strlen($part_data);
$job_object->steps_data[$job_object->step_working]['Parts'][] = array('ETag' => $part->get('ETag'), 'PartNumber' => $job_object->steps_data[$job_object->step_working]['Part']);
$job_object->steps_data[$job_object->step_working]['Part']++;
$time_remaining = $job_object->do_restart_time();
if ($time_remaining < $chunk_upload_time) {
$job_object->do_restart_time(TRUE);
}
$job_object->update_working_data();
}
$s3->completeMultipartUpload(array('Bucket' => $job_object->job['s3bucket'], 'UploadId' => $job_object->steps_data[$job_object->step_working]['UploadId'], 'Key' => $job_object->job['s3dir'] . $job_object->backup_file, 'Parts' => $job_object->steps_data[$job_object->step_working]['Parts']));
} catch (Exception $e) {
$job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
if (!empty($job_object->steps_data[$job_object->step_working]['uploadId'])) {
$s3->abortMultipartUpload(array('Bucket' => $job_object->job['s3bucket'], 'UploadId' => $job_object->steps_data[$job_object->step_working]['uploadId'], 'Key' => $job_object->job['s3dir'] . $job_object->backup_file));
//.........这里部分代码省略.........
示例9: job_run_archive
/**
 * Stores the backup in the local backup folder destination and enforces
 * the configured maximum number of kept backup archives.
 *
 * Saves the download URL of the newest backup, builds a list of existing
 * backup archives in the folder (keyed by modification time so ksort()
 * orders oldest first) and deletes the oldest ones until at most
 * job['maxbackups'] archives remain.
 *
 * @param BackWPup_Job $job_object The currently running job.
 * @return bool TRUE when the step is done.
 */
public function job_run_archive(BackWPup_Job $job_object)
{
    $job_object->substeps_todo = 1;
    if (!empty($job_object->job['jobid'])) {
        // Remember where the newest backup can be downloaded from the backend.
        BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', add_query_arg(array('page' => 'backwpupbackups', 'action' => 'downloadfolder', 'file' => basename($job_object->backup_file), 'jobid' => $job_object->job['jobid']), network_admin_url('admin.php')));
    }
    //Delete old Backupfiles
    $backupfilelist = array();
    $files = array();
    if (is_writable($job_object->backup_folder) && ($dir = opendir($job_object->backup_folder))) {
        //make file list; only writable plain files can be deleted later
        while (FALSE !== ($file = readdir($dir))) {
            // Consistency fix: use is_writable() here too (is_writeable() is a
            // non-canonical alias of the same function).
            if (is_writable($job_object->backup_folder . $file) && !is_dir($job_object->backup_folder . $file) && !is_link($job_object->backup_folder . $file)) {
                //list for deletion, keyed by mtime so the oldest sorts first
                if ($job_object->is_backup_archive($file)) {
                    $backupfilelist[filemtime($job_object->backup_folder . $file)] = $file;
                }
            }
        }
        closedir($dir);
    }
    if ($job_object->job['maxbackups'] > 0) {
        if (count($backupfilelist) > $job_object->job['maxbackups']) {
            ksort($backupfilelist);
            $numdeltefiles = 0;
            while ($file = array_shift($backupfilelist)) {
                if (count($backupfilelist) < $job_object->job['maxbackups']) {
                    break;
                }
                // Bug fix: verify the deletion actually succeeded; previously a
                // failed unlink() was silently counted as a deleted file.
                if (unlink($job_object->backup_folder . $file)) {
                    // NOTE(review): $files is never populated in this method; this
                    // loop is kept for parity with the other destinations but
                    // appears to be dead code — confirm before removing.
                    foreach ($files as $key => $filedata) {
                        if ($filedata['file'] == $job_object->backup_folder . $file) {
                            unset($files[$key]);
                        }
                    }
                    $numdeltefiles++;
                } else {
                    $job_object->log(sprintf(__('Cannot delete backup file %s', 'backwpup'), $job_object->backup_folder . $file), E_USER_ERROR);
                }
            }
            if ($numdeltefiles > 0) {
                $job_object->log(sprintf(_n('One backup file deleted', '%d backup files deleted', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
            }
        }
    }
    $job_object->substeps_done++;
    return TRUE;
}
示例10: job_run_archive
/**
* @param $job_object BackWPup_Job
* @return bool
*/
public function job_run_archive(BackWPup_Job $job_object)
{
$job_object->substeps_todo = 2 + $job_object->backup_filesize;
$job_object->log(sprintf(__('%d. Trying to send backup file to S3 Service …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
try {
$s3 = new AmazonS3(array('key' => $job_object->job['s3accesskey'], 'secret' => BackWPup_Encryption::decrypt($job_object->job['s3secretkey']), 'certificate_authority' => TRUE));
$base_url = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']);
if (stristr($base_url, 'amazonaws.com')) {
$s3->set_region(str_replace(array('http://', 'https://'), '', $base_url));
} else {
$s3->set_hostname(str_replace(array('http://', 'https://'), '', $base_url));
$s3->allow_hostname_override(FALSE);
if (substr($base_url, -1) == '/') {
$s3->enable_path_style(TRUE);
}
}
if (stristr($base_url, 'http://')) {
$s3->disable_ssl();
}
if ($s3->if_bucket_exists($job_object->job['s3bucket'])) {
$job_object->log(sprintf(__('Connected to S3 Bucket "%1$s" in %2$s', 'backwpup'), $job_object->job['s3bucket'], $base_url), E_USER_NOTICE);
} else {
$job_object->log(sprintf(__('S3 Bucket "%s" does not exist!', 'backwpup'), $job_object->job['s3bucket']), E_USER_ERROR);
return TRUE;
}
//transfer file to S3
$job_object->log(__('Starting upload to S3 Service …', 'backwpup'), E_USER_NOTICE);
//Transfer Backup to S3
if ($job_object->job['s3storageclass'] == 'REDUCED_REDUNDANCY') {
//set reduced redundancy or not
$storage = AmazonS3::STORAGE_REDUCED;
} else {
$storage = AmazonS3::STORAGE_STANDARD;
}
if (empty($job_object->job['s3ssencrypt'])) {
$job_object->job['s3ssencrypt'] = NULL;
}
//set progress bar
$s3->register_streaming_read_callback(array($job_object, 'curl_read_callback'));
$result = $s3->create_object($job_object->job['s3bucket'], $job_object->job['s3dir'] . $job_object->backup_file, array('fileUpload' => $job_object->backup_folder . $job_object->backup_file, 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'encryption' => $job_object->job['s3ssencrypt']));
if ($result->status >= 200 and $result->status < 300) {
$job_object->substeps_done = 1 + $job_object->backup_filesize;
$job_object->log(sprintf(__('Backup transferred to %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $job_object->backup_file), E_USER_NOTICE);
if (!empty($job_object->job['jobid'])) {
BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $job_object->job['s3dir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
}
} else {
$job_object->log(sprintf(__('Cannot transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result->status, $result->body), E_USER_ERROR);
}
} catch (Exception $e) {
$job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
return FALSE;
}
try {
$backupfilelist = array();
$filecounter = 0;
$files = array();
$objects = $s3->list_objects($job_object->job['s3bucket'], array('prefix' => $job_object->job['s3dir']));
if (is_object($objects)) {
foreach ($objects->body->Contents as $object) {
$file = basename((string) $object->Key);
$changetime = strtotime((string) $object->LastModified) + get_option('gmt_offset') * 3600;
if ($job_object->is_backup_archive($file)) {
$backupfilelist[$changetime] = $file;
}
$files[$filecounter]['folder'] = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . dirname((string) $object->Key);
$files[$filecounter]['file'] = (string) $object->Key;
$files[$filecounter]['filename'] = basename($object->Key);
$files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . (string) $object->Key . '&jobid=' . $job_object->job['jobid'];
$files[$filecounter]['filesize'] = (int) $object->Size;
$files[$filecounter]['time'] = $changetime;
$filecounter++;
}
}
if ($job_object->job['s3maxbackups'] > 0 && is_object($s3)) {
//Delete old backups
if (count($backupfilelist) > $job_object->job['s3maxbackups']) {
ksort($backupfilelist);
$numdeltefiles = 0;
while ($file = array_shift($backupfilelist)) {
if (count($backupfilelist) < $job_object->job['s3maxbackups']) {
break;
}
//delete files on S3
$delete_s3 = $s3->delete_object($job_object->job['s3bucket'], $job_object->job['s3dir'] . $file);
if ($delete_s3) {
foreach ($files as $key => $filedata) {
if ($filedata['file'] == $job_object->job['s3dir'] . $file) {
unset($files[$key]);
}
}
$numdeltefiles++;
} else {
$job_object->log(sprintf(__('Cannot delete backup from %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $file), E_USER_ERROR);
}
}
//.........这里部分代码省略.........
示例11: job_run_archive
/**
* @param $job_object
* @return bool
*/
public function job_run_archive(BackWPup_Job $job_object)
{
$job_object->substeps_todo = $job_object->backup_filesize + 2;
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
$job_object->log(sprintf(__('%d. Try sending backup to a Microsoft Azure (Blob) …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
}
try {
set_include_path(get_include_path() . PATH_SEPARATOR . BackWPup::get_plugin_data('plugindir') . '/vendor/PEAR/');
/* @var $blobRestProxy WindowsAzure\Blob\BlobRestProxy */
//https causes an error SSL: Connection reset by peer that is why http
$blobRestProxy = WindowsAzure\Common\ServicesBuilder::getInstance()->createBlobService('DefaultEndpointsProtocol=http;AccountName=' . $job_object->job['msazureaccname'] . ';AccountKey=' . BackWPup_Encryption::decrypt($job_object->job['msazurekey']));
if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
//test vor existing container
$containers = $blobRestProxy->listContainers()->getContainers();
$job_object->steps_data[$job_object->step_working]['container_url'] = '';
foreach ($containers as $container) {
if ($container->getName() == $job_object->job['msazurecontainer']) {
$job_object->steps_data[$job_object->step_working]['container_url'] = $container->getUrl();
break;
}
}
if (!$job_object->steps_data[$job_object->step_working]['container_url']) {
$job_object->log(sprintf(__('MS Azure container "%s" does not exist!', 'backwpup'), $job_object->job['msazurecontainer']), E_USER_ERROR);
return TRUE;
} else {
$job_object->log(sprintf(__('Connected to MS Azure container "%s".', 'backwpup'), $job_object->job['msazurecontainer']), E_USER_NOTICE);
}
$job_object->log(__('Starting upload to MS Azure …', 'backwpup'), E_USER_NOTICE);
}
//Prepare Upload
if ($file_handel = fopen($job_object->backup_folder . $job_object->backup_file, 'rb')) {
fseek($file_handel, $job_object->substeps_done);
if (empty($job_object->steps_data[$job_object->step_working]['BlockList'])) {
$job_object->steps_data[$job_object->step_working]['BlockList'] = array();
}
while (!feof($file_handel)) {
$data = fread($file_handel, 1048576 * 4);
//4MB
if (strlen($data) == 0) {
continue;
}
$chunk_upload_start = microtime(TRUE);
$block_count = count($job_object->steps_data[$job_object->step_working]['BlockList']) + 1;
$block_id = md5($data) . str_pad($block_count, 6, "0", STR_PAD_LEFT);
$blobRestProxy->createBlobBlock($job_object->job['msazurecontainer'], $job_object->job['msazuredir'] . $job_object->backup_file, $block_id, $data);
$job_object->steps_data[$job_object->step_working]['BlockList'][] = $block_id;
$chunk_upload_time = microtime(TRUE) - $chunk_upload_start;
$job_object->substeps_done = $job_object->substeps_done + strlen($data);
$time_remaining = $job_object->do_restart_time();
if ($time_remaining < $chunk_upload_time) {
$job_object->do_restart_time(TRUE);
}
$job_object->update_working_data();
}
fclose($file_handel);
} else {
$job_object->log(__('Can not open source file for transfer.', 'backwpup'), E_USER_ERROR);
return FALSE;
}
//crate blog list
$blocklist = new WindowsAzure\Blob\Models\BlockList();
foreach ($job_object->steps_data[$job_object->step_working]['BlockList'] as $block_id) {
$blocklist->addUncommittedEntry($block_id);
}
unset($job_object->steps_data[$job_object->step_working]['BlockList']);
//Commit Blocks
$blobRestProxy->commitBlobBlocks($job_object->job['msazurecontainer'], $job_object->job['msazuredir'] . $job_object->backup_file, $blocklist->getEntries());
$job_object->substeps_done++;
$job_object->log(sprintf(__('Backup transferred to %s', 'backwpup'), $job_object->steps_data[$job_object->step_working]['container_url'] . '/' . $job_object->job['msazuredir'] . $job_object->backup_file), E_USER_NOTICE);
if (!empty($job_object->job['jobid'])) {
BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadmsazure&file=' . $job_object->job['msazuredir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
}
} catch (Exception $e) {
$job_object->log(E_USER_ERROR, sprintf(__('Microsoft Azure API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
$job_object->substeps_done = 0;
unset($job_object->steps_data[$job_object->step_working]['BlockList']);
if (isset($file_handel) && is_resource($file_handel)) {
fclose($file_handel);
}
return FALSE;
}
try {
$backupfilelist = array();
$filecounter = 0;
$files = array();
$blob_options = new WindowsAzure\Blob\Models\ListBlobsOptions();
$blob_options->setPrefix($job_object->job['msazuredir']);
$blobs = $blobRestProxy->listBlobs($job_object->job['msazurecontainer'], $blob_options)->getBlobs();
if (is_array($blobs)) {
foreach ($blobs as $blob) {
$file = basename($blob->getName());
if ($job_object->is_backup_archive($file)) {
$backupfilelist[$blob->getProperties()->getLastModified()->getTimestamp()] = $file;
}
$files[$filecounter]['folder'] = $job_object->steps_data[$job_object->step_working]['container_url'] . "/" . dirname($blob->getName()) . "/";
$files[$filecounter]['file'] = $blob->getName();
//.........这里部分代码省略.........
示例12: job_run
/**
* @param $job_object
* @return bool
*/
public function job_run(BackWPup_Job $job_object)
{
$abs_path = realpath(ABSPATH);
if ($job_object->job['backupabsfolderup']) {
$abs_path = dirname($abs_path);
}
$abs_path = trailingslashit(str_replace('\\', '/', $abs_path));
$job_object->log(sprintf(__('%d. Trying to make a list of folders to back up …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']));
$job_object->substeps_todo = 8;
$job_object->temp['folders_to_backup'] = array();
//Folder lists for blog folders
if ($job_object->substeps_done == 0) {
if ($abs_path && !empty($job_object->job['backuproot'])) {
$abs_path = trailingslashit(str_replace('\\', '/', $abs_path));
$excludes = $this->get_exclude_dirs($abs_path);
foreach ($job_object->job['backuprootexcludedirs'] as $folder) {
$excludes[] = trailingslashit($abs_path . $folder);
}
$this->get_folder_list($job_object, $abs_path, $excludes);
$job_object->add_folders_to_backup($this->folers_to_backup);
$this->folers_to_backup = array();
}
$job_object->substeps_done = 1;
$job_object->update_working_data();
$job_object->do_restart_time();
}
if ($job_object->substeps_done == 1) {
$wp_content_dir = realpath(WP_CONTENT_DIR);
if ($wp_content_dir && !empty($job_object->job['backupcontent'])) {
$wp_content_dir = trailingslashit(str_replace('\\', '/', $wp_content_dir));
$excludes = $this->get_exclude_dirs($wp_content_dir);
foreach ($job_object->job['backupcontentexcludedirs'] as $folder) {
$excludes[] = trailingslashit($wp_content_dir . $folder);
}
$this->get_folder_list($job_object, $wp_content_dir, $excludes);
$job_object->add_folders_to_backup($this->folers_to_backup);
$this->folers_to_backup = array();
}
$job_object->substeps_done = 2;
$job_object->update_working_data();
$job_object->do_restart_time();
}
if ($job_object->substeps_done == 2) {
$wp_plugin_dir = realpath(WP_PLUGIN_DIR);
if ($wp_plugin_dir && !empty($job_object->job['backupplugins'])) {
$wp_plugin_dir = trailingslashit(str_replace('\\', '/', $wp_plugin_dir));
$excludes = $this->get_exclude_dirs($wp_plugin_dir);
foreach ($job_object->job['backuppluginsexcludedirs'] as $folder) {
$excludes[] = trailingslashit($wp_plugin_dir . $folder);
}
$this->get_folder_list($job_object, $wp_plugin_dir, $excludes);
$job_object->add_folders_to_backup($this->folers_to_backup);
$this->folers_to_backup = array();
}
$job_object->substeps_done = 3;
$job_object->update_working_data();
$job_object->do_restart_time();
}
if ($job_object->substeps_done == 3) {
$theme_root = realpath(get_theme_root());
if ($theme_root && !empty($job_object->job['backupthemes'])) {
$theme_root = trailingslashit(str_replace('\\', '/', $theme_root));
$excludes = $this->get_exclude_dirs($theme_root);
foreach ($job_object->job['backupthemesexcludedirs'] as $folder) {
$excludes[] = trailingslashit($theme_root . $folder);
}
$this->get_folder_list($job_object, $theme_root, $excludes);
$job_object->add_folders_to_backup($this->folers_to_backup);
$this->folers_to_backup = array();
}
$job_object->substeps_done = 4;
$job_object->update_working_data();
$job_object->do_restart_time();
}
if ($job_object->substeps_done == 4) {
$upload_dir = realpath(BackWPup_File::get_upload_dir());
if ($upload_dir && !empty($job_object->job['backupuploads'])) {
$upload_dir = trailingslashit(str_replace('\\', '/', $upload_dir));
$excludes = $this->get_exclude_dirs($upload_dir);
foreach ($job_object->job['backupuploadsexcludedirs'] as $folder) {
$excludes[] = trailingslashit($upload_dir . $folder);
}
$this->get_folder_list($job_object, $upload_dir, $excludes);
$job_object->add_folders_to_backup($this->folers_to_backup);
$this->folers_to_backup = array();
}
$job_object->substeps_done = 5;
$job_object->update_working_data();
$job_object->do_restart_time();
}
if ($job_object->substeps_done == 5) {
//include dirs
if ($job_object->job['dirinclude']) {
$dirinclude = explode(',', $job_object->job['dirinclude']);
$dirinclude = array_unique($dirinclude);
//Crate file list for includes
//.........这里部分代码省略.........
示例13: job_run
/**
 * Runs CHECK TABLE on every (optionally WordPress-prefixed) database table
 * and, when configured, tries REPAIR TABLE on broken MyISAM tables.
 *
 * Already-processed tables are tracked in the step data so a restarted
 * step resumes where it left off instead of re-checking everything.
 *
 * @param BackWPup_Job $job_object The currently running job.
 * @return bool TRUE when the step is done.
 */
public function job_run(BackWPup_Job $job_object)
{
    global $wpdb;
    /* @var wpdb $wpdb */
    $job_object->log(sprintf(__('%d. Trying to check database …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']));
    // Initialise the resume bookkeeping for this step if it is missing.
    if (!isset($job_object->steps_data[$job_object->step_working]['DONETABLE']) || !is_array($job_object->steps_data[$job_object->step_working]['DONETABLE'])) {
        $job_object->steps_data[$job_object->step_working]['DONETABLE'] = array();
    }
    // Collect table names and their kinds (BASE TABLE vs. VIEW).
    $table_names = array();
    $table_types = array();
    foreach ($wpdb->get_results('SHOW FULL TABLES FROM `' . DB_NAME . '`', ARRAY_N) as $table_row) {
        if ($job_object->job['dbcheckwponly'] && substr($table_row[0], 0, strlen($wpdb->prefix)) != $wpdb->prefix) {
            continue; // restrict to tables carrying the WordPress prefix
        }
        $table_names[] = $table_row[0];
        $table_types[$table_row[0]] = $table_row[1];
    }
    // One substep per table to check.
    $job_object->substeps_todo = count($table_names);
    // Engine information per table, needed to decide what can be checked/repaired.
    $table_status = array();
    foreach ($wpdb->get_results("SHOW TABLE STATUS FROM `" . DB_NAME . "`", ARRAY_A) as $status_row) {
        $table_status[$status_row['Name']] = $status_row;
    }
    if ($job_object->substeps_todo > 0) {
        foreach ($table_names as $table_name) {
            // Skip tables already handled in a previous try of this step.
            if (in_array($table_name, $job_object->steps_data[$job_object->step_working]['DONETABLE'], true)) {
                continue;
            }
            // Views cannot be checked.
            if ($table_types[$table_name] == 'VIEW') {
                $job_object->log(sprintf(__('Table %1$s is a view. Not checked.', 'backwpup'), $table_name));
                continue;
            }
            // Only MyISAM and InnoDB engines support CHECK TABLE.
            if ($table_status[$table_name]['Engine'] != 'MyISAM' && $table_status[$table_name]['Engine'] != 'InnoDB') {
                $job_object->log(sprintf(__('Table %1$s is not a MyISAM/InnoDB table. Not checked.', 'backwpup'), $table_name));
                continue;
            }
            // CHECK TABLE works for MyISAM and InnoDB tables
            // (http://dev.mysql.com/doc/refman/5.1/de/check-table.html)
            $check_result = $wpdb->get_row("CHECK TABLE `" . $table_name . "` MEDIUM", OBJECT);
            $check_message = sprintf(__('Result of table check for %1$s is: %2$s', 'backwpup'), $table_name, $check_result->Msg_text);
            if (strtolower($check_result->Msg_text) == 'ok') {
                // A clean result is only worth logging in debug mode.
                if ($job_object->is_debug()) {
                    $job_object->log($check_message);
                }
            } elseif (strtolower($check_result->Msg_type) == 'warning') {
                $job_object->log($check_message, E_USER_WARNING);
            } else {
                $job_object->log($check_message, E_USER_ERROR);
            }
            // Attempt a repair only when requested, the check failed, and the
            // engine is MyISAM (REPAIR TABLE does not support InnoDB).
            if (!empty($job_object->job['dbcheckrepair']) && strtolower($check_result->Msg_text) != 'ok' && $table_status[$table_name]['Engine'] == 'MyISAM') {
                $repair_result = $wpdb->get_row('REPAIR TABLE `' . $table_name . '` EXTENDED', OBJECT);
                $repair_message = sprintf(__('Result of table repair for %1$s is: %2$s', 'backwpup'), $table_name, $repair_result->Msg_text);
                if (strtolower($repair_result->Msg_text) == 'ok') {
                    $job_object->log($repair_message);
                } elseif (strtolower($repair_result->Msg_type) == 'warning') {
                    $job_object->log($repair_message, E_USER_WARNING);
                } else {
                    $job_object->log($repair_message, E_USER_ERROR);
                }
            }
            // Record progress so a restart does not re-check this table.
            $job_object->steps_data[$job_object->step_working]['DONETABLE'][] = $table_name;
            $job_object->substeps_done++;
        }
        $job_object->log(__('Database check done!', 'backwpup'));
    } else {
        $job_object->log(__('No tables to check.', 'backwpup'));
    }
    unset($job_object->steps_data[$job_object->step_working]['DONETABLE']);
    return TRUE;
}
示例14: job_run
/**
 * Generates a text file listing all installed plugins — grouped into
 * "all plugin information", "active" and "inactive" — and adds it to the
 * job's additional files to back up.
 *
 * The file name is derived from the job's 'pluginlistfile' setting; the
 * file is optionally written through a gzip or bzip2 stream wrapper
 * depending on 'pluginlistfilecompression'.
 *
 * @param BackWPup_Job $job_object The currently running job.
 * @return bool TRUE on success, FALSE when the target file cannot be opened.
 */
public function job_run(BackWPup_Job $job_object)
{
    $job_object->substeps_todo = 1;
    $job_object->log(sprintf(__('%d. Trying to generate a file with installed plugin names …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']));
    //build filename (only once; kept in temp so a restarted step reuses it)
    if (empty($job_object->temp['pluginlistfile'])) {
        $job_object->temp['pluginlistfile'] = $job_object->generate_filename($job_object->job['pluginlistfile'], 'txt') . $job_object->job['pluginlistfilecompression'];
    }
    // Open the target file, optionally through a compression stream wrapper.
    if ($job_object->job['pluginlistfilecompression'] == '.gz') {
        $handle = fopen('compress.zlib://' . BackWPup::get_plugin_data('TEMP') . $job_object->temp['pluginlistfile'], 'w');
    } elseif ($job_object->job['pluginlistfilecompression'] == '.bz2') {
        $handle = fopen('compress.bzip2://' . BackWPup::get_plugin_data('TEMP') . $job_object->temp['pluginlistfile'], 'w');
    } else {
        $handle = fopen(BackWPup::get_plugin_data('TEMP') . $job_object->temp['pluginlistfile'], 'w');
    }
    if ($handle) {
        //open file
        // Header block with generator version, blog metadata and timestamp.
        $header = "------------------------------------------------------------" . PHP_EOL;
        $header .= " Plugin list generated with BackWPup version: " . BackWPup::get_plugin_data('Version') . PHP_EOL;
        $header .= " " . translate(BackWPup::get_plugin_data('pluginuri'), 'backwpup') . PHP_EOL;
        $header .= " Blog Name: " . get_bloginfo('name') . PHP_EOL;
        $header .= " Blog URL: " . get_bloginfo('url') . PHP_EOL;
        $header .= " Generated on: " . date('Y-m-d H:i.s', current_time('timestamp')) . PHP_EOL;
        $header .= "------------------------------------------------------------" . PHP_EOL . PHP_EOL;
        fwrite($handle, $header);
        //get Plugins
        if (!function_exists('get_plugins')) {
            // get_plugins() lives in wp-admin and is not loaded on cron/CLI runs.
            require_once ABSPATH . 'wp-admin/includes/plugin.php';
        }
        $plugins = get_plugins();
        $plugins_active = get_option('active_plugins');
        //write it to file
        // Section 1: every installed plugin with version, author and URI.
        fwrite($handle, PHP_EOL . __('All plugin information:', 'backwpup') . PHP_EOL . '------------------------------' . PHP_EOL);
        foreach ($plugins as $plugin) {
            fwrite($handle, $plugin['Name'] . ' (v.' . $plugin['Version'] . ') ' . html_entity_decode(sprintf(__('from %s', 'backwpup'), $plugin['Author']), ENT_QUOTES) . PHP_EOL . "\t" . $plugin['PluginURI'] . PHP_EOL);
        }
        // Section 2: names of active plugins ($key is the plugin basename).
        fwrite($handle, PHP_EOL . __('Active plugins:', 'backwpup') . PHP_EOL . '------------------------------' . PHP_EOL);
        foreach ($plugins as $key => $plugin) {
            if (in_array($key, $plugins_active)) {
                fwrite($handle, $plugin['Name'] . PHP_EOL);
            }
        }
        // Section 3: names of inactive plugins.
        fwrite($handle, PHP_EOL . __('Inactive plugins:', 'backwpup') . PHP_EOL . '------------------------------' . PHP_EOL);
        foreach ($plugins as $key => $plugin) {
            if (!in_array($key, $plugins_active)) {
                fwrite($handle, $plugin['Name'] . PHP_EOL);
            }
        }
        fclose($handle);
    } else {
        $job_object->log(__('Can not open target file for writing.', 'backwpup'), E_USER_ERROR);
        return FALSE;
    }
    //add file to backup files
    if (is_readable(BackWPup::get_plugin_data('TEMP') . $job_object->temp['pluginlistfile'])) {
        $job_object->additional_files_to_backup[] = BackWPup::get_plugin_data('TEMP') . $job_object->temp['pluginlistfile'];
        $job_object->log(sprintf(__('Added plugin list file "%1$s" with %2$s to backup file list.', 'backwpup'), $job_object->temp['pluginlistfile'], size_format(filesize(BackWPup::get_plugin_data('TEMP') . $job_object->temp['pluginlistfile']), 2)));
    }
    $job_object->substeps_done = 1;
    return TRUE;
}
示例15: job_run_archive
/**
* @param $job_object
* @return bool
*/
public function job_run_archive(BackWPup_Job $job_object)
{
$job_object->substeps_todo = 1;
$job_object->log(sprintf(__('%d. Try to send backup with email …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
//check file Size
if (!empty($job_object->job['emailefilesize'])) {
if ($job_object->backup_filesize > $job_object->job['emailefilesize'] * 1024 * 1024) {
$job_object->log(__('Backup archive too big to be sent by email!', 'backwpup'), E_USER_ERROR);
$job_object->substeps_done = 1;
return TRUE;
}
}
$job_object->log(sprintf(__('Sending email to %s…', 'backwpup'), $job_object->job['emailaddress']), E_USER_NOTICE);
//get mail settings
$emailmethod = 'mail';
$emailsendmail = '';
$emailhost = '';
$emailhostport = '';
$emailsecure = '';
$emailuser = '';
$emailpass = '';
if (empty($job_object->job['emailmethod'])) {
//do so if i'm the wp_mail to get the settings
global $phpmailer;
// (Re)create it, if it's gone missing
if (!is_object($phpmailer) || !$phpmailer instanceof PHPMailer) {
require_once ABSPATH . WPINC . '/class-phpmailer.php';
require_once ABSPATH . WPINC . '/class-smtp.php';
$phpmailer = new PHPMailer(true);
}
//only if PHPMailer really used
if (is_object($phpmailer)) {
do_action_ref_array('phpmailer_init', array(&$phpmailer));
//get settings from PHPMailer
$emailmethod = $phpmailer->Mailer;
$emailsendmail = $phpmailer->Sendmail;
$emailhost = $phpmailer->Host;
$emailhostport = $phpmailer->Port;
$emailsecure = $phpmailer->SMTPSecure;
$emailuser = $phpmailer->Username;
$emailpass = $phpmailer->Password;
}
} else {
$emailmethod = $job_object->job['emailmethod'];
$emailsendmail = $job_object->job['emailsendmail'];
$emailhost = $job_object->job['emailhost'];
$emailhostport = $job_object->job['emailhostport'];
$emailsecure = $job_object->job['emailsecure'];
$emailuser = $job_object->job['emailuser'];
$emailpass = BackWPup_Encryption::decrypt($job_object->job['emailpass']);
}
//Generate mail with Swift Mailer
if (!class_exists('Swift', FALSE)) {
require BackWPup::get_plugin_data('plugindir') . '/vendor/SwiftMailer/swift_required.php';
}
if (function_exists('mb_internal_encoding') && (int) ini_get('mbstring.func_overload') & 2) {
$mbEncoding = mb_internal_encoding();
mb_internal_encoding('ASCII');
}
try {
//Set Temp dir for mailing
Swift_Preferences::getInstance()->setTempDir(untrailingslashit(BackWPup::get_plugin_data('TEMP')))->setCacheType('disk');
// Create the Transport
if ($emailmethod == 'smtp') {
$transport = Swift_SmtpTransport::newInstance($emailhost, $emailhostport);
$transport->setUsername($emailuser);
$transport->setPassword($emailpass);
if ($emailsecure == 'ssl') {
$transport->setEncryption('ssl');
}
if ($emailsecure == 'tls') {
$transport->setEncryption('tls');
}
} elseif ($emailmethod == 'sendmail') {
$transport = Swift_SendmailTransport::newInstance($emailsendmail);
} else {
$job_object->need_free_memory($job_object->backup_filesize * 8);
$transport = Swift_MailTransport::newInstance();
}
// Create the Mailer using your created Transport
$emailer = Swift_Mailer::newInstance($transport);
// Create a message
$message = Swift_Message::newInstance(sprintf(__('BackWPup archive from %1$s: %2$s', 'backwpup'), date_i18n('d-M-Y H:i', $job_object->start_time, TRUE), esc_attr($job_object->job['name'])));
$message->setFrom(array($job_object->job['emailsndemail'] => $job_object->job['emailsndemailname']));
$message->setTo(array($job_object->job['emailaddress']));
$message->setBody(sprintf(__('Backup archive: %s', 'backwpup'), $job_object->backup_file), 'text/plain', strtolower(get_bloginfo('charset')));
$message->attach(Swift_Attachment::fromPath($job_object->backup_folder . $job_object->backup_file, $job_object->get_mime_type($job_object->backup_folder . $job_object->backup_file)));
// Send the message
$result = $emailer->send($message);
} catch (Exception $e) {
$job_object->log('Swift Mailer: ' . $e->getMessage(), E_USER_ERROR);
}
if (isset($mbEncoding)) {
mb_internal_encoding($mbEncoding);
}
if (!isset($result) || !$result) {
//.........这里部分代码省略.........