

PHP AmazonS3::list_objects Method Code Examples

This article collects typical usage examples of the PHP AmazonS3::list_objects method. If you are wondering how AmazonS3::list_objects works in PHP, or how to call it in practice, the curated examples below should help. You can also explore other usage examples of the AmazonS3 class.


Below are 14 code examples of the AmazonS3::list_objects method, sorted by popularity by default. You can upvote the examples you find useful; your ratings help the system recommend better PHP code samples.
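
Before the individual examples, here is a minimal, self-contained sketch of the call pattern they all share. It assumes AWS SDK for PHP 1.x (sdk.class.php); the credentials, bucket name, and prefix are placeholders:

 // Minimal list_objects sketch (SDK 1.x assumed; credentials and bucket are placeholders).
 require_once 'sdk.class.php';
 $s3 = new AmazonS3(array('key' => 'YOUR_KEY', 'secret' => 'YOUR_SECRET'));
 // Request up to 1000 keys under a prefix; the response body is a CFSimpleXML tree.
 $response = $s3->list_objects('my-bucket', array('prefix' => 'backups/'));
 if ($response->isOK()) {
     foreach ($response->body->Contents as $object) {
         echo (string) $object->Key . ' (' . (int) $object->Size . " bytes)\n";
     }
 }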

Example 1: getFiles

 public function getFiles($remotePath)
 {
     $response = $this->_s3->list_objects($this->_bucket, array('prefix' => $remotePath));
     $files = array();
     if ($response->isOK()) {
         $i = 0;
         while ($object = $response->body->Contents[$i]) {
             $i++;
             $files[] = array('file' => (string) $object->Key, 'size' => (int) $object->Size, 'date' => (string) $object->LastModified);
         }
     } else {
         Api_Core_Application::log("Get files " . $remotePath, array('upload', array('header' => $response->header, 'status' => $response->status, 'body' => $response->body)), Api_Component_Log_Logger::LEVEL_ERROR);
     }
     return $files;
 }
Author: otis22, Project: reserve-copy-system, Lines: 15, Source: Amazon.php
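
A caveat worth noting: a single list_objects call returns at most 1,000 keys, so getFiles above only sees the first page of a large bucket. Below is a hedged sketch of draining every page with the marker option, reusing Example 1's _s3 and _bucket members (an assumption, not part of the original):

     $marker = '';
     $keys = array();
     do {
         $response = $this->_s3->list_objects($this->_bucket, array('prefix' => $remotePath, 'marker' => $marker));
         if (!$response->isOK()) {
             break; // error handling elided for brevity
         }
         foreach ($response->body->Contents as $object) {
             $keys[] = (string) $object->Key;
             $marker = (string) $object->Key; // the next page starts after the last key seen
         }
     } while ((string) $response->body->IsTruncated === 'true');

Example 2 below applies the same marker/IsTruncated loop in production code.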

Example 2: _list

 /**
  * lists bucket's objects, applying callback to each of them
  *
  * @param mixed $callback first argument of the callback is CFSimpleXML object
  * @param array $params
  */
 protected function _list($callback, $params = array())
 {
     // prepare data for loop
     $bucket = $this->getBucket();
     $baseDir = $this->getBaseDir();
     $marker = '';
     $itemCount = 0;
     $v = false;
     $firstBatch = true;
     do {
         $list = $this->_s3->list_objects($bucket, array('marker' => $marker, 'prefix' => $baseDir));
         if (!is_object($list->body->Contents)) {
             $this->_out->stop("S3 response problem, no content returned");
         }
         $count = $list->body->Contents->count();
         if ($count === 0) {
             if ($firstBatch) {
                 break;
             } else {
                 $this->_out->stop("S3 response problem, not all files returned");
             }
         }
         $this->_itemCount += $count;
         $jobFiles = $this->_out->jobStart("processing information about {$count} remote files");
         // download meta data
         //            $batch = new CFBatchRequest(3);
         //            foreach ($list->body->Contents as $v) {
         //                /** @noinspection PhpUndefinedMethodInspection */
         //                $this->_s3->batch($batch)->get_object_headers($bucket, $v->Key); // Get content-type
         //        }
         //            /** @var $response CFArray */
         //            $response = $this->_s3->batch($batch)->send();
         //            if (!$response->areOK()) {
         //                $this->_out->stop("S3 response problem, meta data not returned");
         //            }
         //            if (count($response) != $count) {
         //                $this->_out->stop("S3 response problem, meta data not returned for all files");
         //            }
         // process received information
         $metaId = 0;
         foreach ($list->body->Contents as $v) {
             switch (true) {
                 case is_array($callback):
                 case is_string($callback):
                     call_user_func($callback, $v, $params);
                     break;
                 case is_callable($callback):
                     /** @var $callback Closure */
                     $callback($v, $params);
                     break;
             }
         }
         $this->_out->jobEnd($jobFiles, "updated info about one batch of files");
         // move to next batch of files
         $marker = $v->Key;
         $firstBatch = false;
     } while ((string) $list->body->IsTruncated == 'true');
 }
Author: dannypenrose, Project: xtbackup, Lines: 64, Source: S3.php

Example 3: syncToS3

 protected function syncToS3($arguments = array(), $options = array())
 {
     list($bucket, $prefix) = explode(':', $arguments['destination']);
     $file_list = sfFinder::type('file')->in($arguments['source']);
     $object_list_response = $this->s3->list_objects($bucket);
     if (!$object_list_response->isOk()) {
         throw new sfException($object_list_response->body->Message);
     }
      $object_list = array();
      if (isset($object_list_response->body->Contents)) {
         foreach ($object_list_response->body->Contents as $object) {
             // var_dump($object->LastModified);
             $object_list[] = $object->Key;
         }
     }
     $files_queued = 0;
     foreach ($file_list as $file) {
         $filename = explode(DIRECTORY_SEPARATOR, $file);
         $filename = array_pop($filename);
         $offset = strpos($file, $arguments['source']);
         $s3_location = substr(str_replace($arguments['source'], '', substr($file, $offset)), 1);
         if (in_array($s3_location, $object_list)) {
             continue;
         }
         $this->s3->batch()->create_object($bucket, $s3_location, array('fileUpload' => $file));
         $files_queued++;
         $this->logSection('file+', $bucket . ':' . $s3_location);
     }
     if ($files_queued <= 0) {
         $this->log('All files have already been synced, no need to upload any files');
         return;
     }
     $upload_response = $this->s3->batch()->send();
     if (!$upload_response->areOk()) {
         throw new sfException($upload_response->body->Message);
     }
     $this->log('Files synced to bucket');
 }
Author: JoshuaEstes, Project: sfAmazonPlugin, Lines: 37, Source: s3SyncTask.class.php

Example 4: getS3ObjectList

 /**
  * Get a list of objects from within a bucket
  * @param string $dir
  * @return array
  */
 public function getS3ObjectList($dir)
 {
      $c = array();
      $c['delimiter'] = '/';
     if (!empty($dir) && $dir != '/') {
         $c['prefix'] = $dir;
     }
     $list = array();
     $cps = $this->driver->list_objects($this->bucket, $c);
     foreach ($cps->body->CommonPrefixes as $prefix) {
         if (!empty($prefix->Prefix) && $prefix->Prefix != $dir && $prefix->Prefix != '/') {
             $list[] = (string) $prefix->Prefix;
         }
     }
     $response = $this->driver->get_object_list($this->bucket, $c);
     foreach ($response as $file) {
         $list[] = $file;
     }
     return $list;
 }
Author: nervlin4444, Project: modx-cms, Lines: 24, Source: mods3mediasource.class.php
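
The 'delimiter' option used above is what makes flat S3 keys behave like folders: keys that share the prefix up to the next '/' are collapsed into CommonPrefixes, and only direct children remain in Contents. A minimal sketch of reading both halves of the response (the prefix below is a placeholder):

     // With a delimiter, "subdirectories" arrive as CommonPrefixes, files as Contents.
     $response = $this->driver->list_objects($this->bucket, array('prefix' => 'images/', 'delimiter' => '/'));
     foreach ($response->body->CommonPrefixes as $prefix) {
         echo 'dir:  ' . (string) $prefix->Prefix . "\n";  // e.g. images/2011/
     }
     foreach ($response->body->Contents as $object) {
         echo 'file: ' . (string) $object->Key . "\n";     // e.g. images/logo.png
     }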

Example 5: send


//......... part of the code omitted here .........
             if (isset($upload_data['error'])) {
                 // Some kind of error.
                 $this_error = 'Stash error(s): `' . implode(' - ', $upload_data['error']) . '`.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             }
             unset($files[$file_id]);
             // Remove from list of files we have not sent yet.
             pb_backupbuddy::status('details', 'Stash success sending file `' . basename($file) . '`. File uploaded and reported to Stash as completed.');
             // Load destination fileoptions.
             pb_backupbuddy::status('details', 'About to load fileoptions data.');
             require_once pb_backupbuddy::plugin_path() . '/classes/fileoptions.php';
             $fileoptions_obj = new pb_backupbuddy_fileoptions(backupbuddy_core::getLogDirectory() . 'fileoptions/send-' . $send_id . '.txt', $read_only = false, $ignore_lock = false, $create_file = false);
             if (true !== ($result = $fileoptions_obj->is_ok())) {
                 pb_backupbuddy::status('error', __('Fatal Error #9034.84838. Unable to access fileoptions data.', 'it-l10n-backupbuddy') . ' Error: ' . $result);
                 return false;
             }
             pb_backupbuddy::status('details', 'Fileoptions data loaded.');
             $fileoptions =& $fileoptions_obj->options;
             // Save stats.
             if (isset($uploaded_speed)) {
                 $fileoptions['write_speed'] = $uploaded_speed;
                 $fileoptions_obj->save();
             }
             //$fileoptions['finish_time'] = time();
             //$fileoptions['status'] = 'success';
             unset($fileoptions_obj);
         }
     }
     // end foreach.
     // BEGIN FILE LIMIT PROCESSING. Enforce archive limits if applicable.
     if ($backup_type == 'full') {
         $limit = $full_archive_limit;
         pb_backupbuddy::status('details', 'Stash full backup archive limit of `' . $limit . '` of type `full` based on destination settings.');
     } elseif ($backup_type == 'db') {
         $limit = $db_archive_limit;
         pb_backupbuddy::status('details', 'Stash database backup archive limit of `' . $limit . '` of type `db` based on destination settings.');
     } elseif ($backup_type == 'files') {
         $limit = $db_archive_limit;
         pb_backupbuddy::status('details', 'Stash database backup archive limit of `' . $limit . '` of type `files` based on destination settings.');
     } else {
         $limit = 0;
         pb_backupbuddy::status('warning', 'Warning #54854895. Stash was unable to determine backup type (reported: `' . $backup_type . '`) so archive limits NOT enforced for this backup.');
     }
     if ($limit > 0) {
         pb_backupbuddy::status('details', 'Stash archive limit enforcement beginning.');
         // S3 object for managing files.
         $s3_manage = new AmazonS3($manage_data['credentials']);
         if ($disable_ssl === true) {
             @$s3_manage->disable_ssl(true);
         }
         // Get file listing.
         $response_manage = $s3_manage->list_objects($manage_data['bucket'], array('prefix' => $manage_data['subkey'] . $remote_path . $backup_type_dir));
         // list all the files in the subscriber account
         // Create array of backups and organize by date
         $prefix = backupbuddy_core::backup_prefix();
         // List backups associated with this site by date.
         $backups = array();
         foreach ($response_manage->body->Contents as $object) {
             $file = str_replace($manage_data['subkey'] . $remote_path . $backup_type_dir, '', $object->Key);
             // Stash stores files in a directory per site so no need to check prefix here! if ( false !== strpos( $file, 'backup-' . $prefix . '-' ) ) { // if backup has this site prefix...
             $backups[$file] = strtotime($object->LastModified);
         }
         arsort($backups);
         pb_backupbuddy::status('details', 'Stash found `' . count($backups) . '` backups of this type when checking archive limits.');
         if (count($backups) > $limit) {
             pb_backupbuddy::status('details', 'More archives (' . count($backups) . ') than limit (' . $limit . ') allows. Trimming...');
             $i = 0;
             $delete_fail_count = 0;
             foreach ($backups as $buname => $butime) {
                 $i++;
                 if ($i > $limit) {
                     pb_backupbuddy::status('details', 'Trimming excess file `' . $buname . '`...');
                     $response = $s3_manage->delete_object($manage_data['bucket'], $manage_data['subkey'] . $remote_path . $backup_type_dir . $buname);
                     if (!$response->isOK()) {
                         pb_backupbuddy::status('details', 'Unable to delete excess Stash file `' . $buname . '`. Details: `' . print_r($response, true) . '`.');
                         $delete_fail_count++;
                     }
                 }
             }
             pb_backupbuddy::status('details', 'Finished trimming excess backups.');
             if ($delete_fail_count !== 0) {
                 $error_message = 'Stash remote limit could not delete ' . $delete_fail_count . ' backups.';
                 pb_backupbuddy::status('error', $error_message);
                 backupbuddy_core::mail_error($error_message);
             }
         }
         pb_backupbuddy::status('details', 'Stash completed archive limiting.');
     } else {
         pb_backupbuddy::status('details', 'No Stash archive file limit to enforce.');
     }
     // End remote backup limit
     if (isset($fileoptions_obj)) {
         unset($fileoptions_obj);
     }
     // END FILE LIMIT PROCESSING.
     // Success if we made it this far.
     return true;
 }
Author: adrianjonmiller, Project: animalhealth, Lines: 101, Source: init.php
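
Stripped of the plugin plumbing, the archive-limit logic in Example 5 (and in Examples 6, 7, 10, and 11 below) reduces to: list the keys under a prefix, sort them newest-first by LastModified, and delete everything past the limit. A condensed sketch under the same SDK assumptions ($s3, $bucket, $prefix, and $limit are placeholders):

     // One page of up to 1000 keys; combine with the marker loop above for larger buckets.
     $response = $s3->list_objects($bucket, array('prefix' => $prefix));
     $backups = array();
     foreach ($response->body->Contents as $object) {
         $backups[(string) $object->Key] = strtotime((string) $object->LastModified);
     }
     arsort($backups); // newest first
     $i = 0;
     foreach ($backups as $key => $mtime) {
         if (++$i > $limit) {
             $s3->delete_object($bucket, $key); // trim the excess archive
         }
     }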

Example 6: send


//......... part of the code omitted here .........
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             //	pb_backupbuddy::status( 'details', 'Stash file upload speed: ' . ( $response->header['_info']['speed_upload'] / 1024 / 1024 ) . 'MB/sec. This number may be invalid for small file transfers.' );
              pb_backupbuddy::status('details', 'Stash put success. Need to notify Stash of upload completion. Details: `' . print_r($response, true) . '`.');
         }
         delete_transient('pb_backupbuddy_stashquota_' . $settings['itxapi_username']);
         // Delete quota transient since it probably has changed now.
          // Notify Stash API that things were successful.
         $done_url = $stash->get_upload_url($file, 'done', $remote_path . $backup_type_dir . basename($file));
         pb_backupbuddy::status('details', 'Notifying Stash of completed upload with done url `' . $done_url . '`.');
         $request = new RequestCore($done_url);
         $response = $request->send_request(true);
         if (!$response->isOK()) {
             $this_error = 'Error #756834682. Could not finalize Stash upload. Response code: `' . $response->get_response_code() . '`; Response body: `' . $response->get_response_body() . '`; Response headers: `' . $response->get_response_header() . '`.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             // Good server response.
             // See if we got an optional json response.
             $upload_data = @json_decode($response->body, true);
             if (isset($upload_data['error'])) {
                 // Some kind of error.
                 $this_error = 'Stash error(s): `' . implode(' - ', $upload_data['error']) . '`.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             }
             unset($files[$file_id]);
             // Remove from list of files we have not sent yet.
             pb_backupbuddy::status('details', 'Stash success sending file `' . basename($file) . '`. File uploaded and reported to Stash as completed.');
         }
         // Enforce archive limits if applicable.
         if ($backup_type == 'full') {
             $limit = $full_archive_limit;
             pb_backupbuddy::status('details', 'Stash full backup archive limit of `' . $limit . '` based on destination settings.');
         } elseif ($backup_type == 'db') {
             $limit = $db_archive_limit;
             pb_backupbuddy::status('details', 'Stash database backup archive limit of `' . $limit . '` based on destination settings.');
         } else {
             $limit = 0;
             pb_backupbuddy::status('error', 'Error #54854895. Stash was unable to determine backup type so archive limits NOT enforced for this backup.');
         }
         if ($limit > 0) {
             pb_backupbuddy::status('details', 'Stash archive limit enforcement beginning.');
             // S3 object for managing files.
             $s3_manage = new AmazonS3($manage_data['credentials']);
             if ($disable_ssl === true) {
                 @$s3_manage->disable_ssl(true);
             }
             // Get file listing.
             $response_manage = $s3_manage->list_objects($manage_data['bucket'], array('prefix' => $manage_data['subkey'] . $remote_path . $backup_type_dir));
             // list all the files in the subscriber account
             // Create array of backups and organize by date
             $prefix = pb_backupbuddy::$classes['core']->backup_prefix();
             // List backups associated with this site by date.
             $backups = array();
             foreach ($response_manage->body->Contents as $object) {
                 $file = str_replace($manage_data['subkey'] . $remote_path . $backup_type_dir, '', $object->Key);
                 // Stash stores files in a directory per site so no need to check prefix here! if ( false !== strpos( $file, 'backup-' . $prefix . '-' ) ) { // if backup has this site prefix...
                 $backups[$file] = strtotime($object->LastModified);
                 //}
             }
             arsort($backups);
             //error_log( 'backups: ' . print_r( $backups, true ) );
             pb_backupbuddy::status('details', 'Stash found `' . count($backups) . '` backups of this type when checking archive limits.');
             if (count($backups) > $limit) {
                 pb_backupbuddy::status('details', 'More archives (' . count($backups) . ') than limit (' . $limit . ') allows. Trimming...');
                 $i = 0;
                 $delete_fail_count = 0;
                 foreach ($backups as $buname => $butime) {
                     $i++;
                     if ($i > $limit) {
                         pb_backupbuddy::status('details', 'Trimming excess file `' . $buname . '`...');
                         $response = $s3_manage->delete_object($manage_data['bucket'], $manage_data['subkey'] . $remote_path . $backup_type_dir . $buname);
                         if (!$response->isOK()) {
                             pb_backupbuddy::status('details', 'Unable to delete excess Stash file `' . $buname . '`. Details: `' . print_r($response, true) . '`.');
                             $delete_fail_count++;
                         }
                     }
                 }
                 pb_backupbuddy::status('details', 'Finished trimming excess backups.');
                 if ($delete_fail_count !== 0) {
                     $error_message = 'Stash remote limit could not delete ' . $delete_fail_count . ' backups.';
                     pb_backupbuddy::status('error', $error_message);
                     pb_backupbuddy::$classes['core']->mail_error($error_message);
                 }
             }
             pb_backupbuddy::status('details', 'Stash completed archive limiting.');
         } else {
             pb_backupbuddy::status('details', 'No Stash archive file limit to enforce.');
         }
         // End remote backup limit
     }
     // end foreach.
     // Success if we made it this far.
     return true;
 }
Author: CherylMuniz, Project: fashion, Lines: 101, Source: init.php

Example 7: job_run_archive

 /**
  * @param $job_object
  * @return bool
  */
 public function job_run_archive(&$job_object)
 {
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     $job_object->log(sprintf(__('%d. Trying to send backup file to S3 Service&#160;&hellip;', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
     try {
         $s3 = new AmazonS3(array('key' => $job_object->job['s3accesskey'], 'secret' => BackWPup_Encryption::decrypt($job_object->job['s3secretkey']), 'certificate_authority' => TRUE));
         $base_url = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']);
         if (stristr($base_url, 'amazonaws.com')) {
             $s3->set_region(str_replace(array('http://', 'https://'), '', $base_url));
         } else {
             $s3->set_hostname(str_replace(array('http://', 'https://'), '', $base_url));
             $s3->allow_hostname_override(FALSE);
             if (substr($base_url, -1) == '/') {
                 $s3->enable_path_style(TRUE);
             }
         }
         if (stristr($base_url, 'http://')) {
             $s3->disable_ssl();
         }
         if ($s3->if_bucket_exists($job_object->job['s3bucket'])) {
             $job_object->log(sprintf(__('Connected to S3 Bucket "%1$s" in %2$s', 'backwpup'), $job_object->job['s3bucket'], $base_url), E_USER_NOTICE);
         } else {
             $job_object->log(sprintf(__('S3 Bucket "%s" does not exist!', 'backwpup'), $job_object->job['s3bucket']), E_USER_ERROR);
             return TRUE;
         }
         //transfer file to S3
         $job_object->log(__('Starting upload to S3 Service&#160;&hellip;', 'backwpup'), E_USER_NOTICE);
         //Transfer Backup to S3
         if ($job_object->job['s3storageclass'] == 'REDUCED_REDUNDANCY') {
             //set reduced redundancy or not
             $storage = AmazonS3::STORAGE_REDUCED;
         } else {
             $storage = AmazonS3::STORAGE_STANDARD;
         }
         if (empty($job_object->job['s3ssencrypt'])) {
             $job_object->job['s3ssencrypt'] = NULL;
         }
         //set progress bar
         $s3->register_streaming_read_callback(array($job_object, 'curl_read_callback'));
         $result = $s3->create_object($job_object->job['s3bucket'], $job_object->job['s3dir'] . $job_object->backup_file, array('fileUpload' => $job_object->backup_folder . $job_object->backup_file, 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'encryption' => $job_object->job['s3ssencrypt']));
         if ($result->status >= 200 and $result->status < 300) {
             $job_object->substeps_done = 1 + $job_object->backup_filesize;
             $job_object->log(sprintf(__('Backup transferred to %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $job_object->backup_file), E_USER_NOTICE);
             if (!empty($job_object->job['jobid'])) {
                 BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $job_object->job['s3dir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
             }
         } else {
             $job_object->log(sprintf(__('Cannot transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result->status, $result->body), E_USER_ERROR);
         }
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), htmlentities($e->getMessage())), $e->getFile(), $e->getLine());
         return FALSE;
     }
     try {
         $backupfilelist = array();
         $filecounter = 0;
         $files = array();
         $objects = $s3->list_objects($job_object->job['s3bucket'], array('prefix' => $job_object->job['s3dir']));
         if (is_object($objects)) {
             foreach ($objects->body->Contents as $object) {
                 $file = basename((string) $object->Key);
                 $changetime = strtotime((string) $object->LastModified) + get_option('gmt_offset') * 3600;
                 if ($job_object->is_backup_archive($file)) {
                     $backupfilelist[$changetime] = $file;
                 }
                 $files[$filecounter]['folder'] = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . dirname((string) $object->Key);
                 $files[$filecounter]['file'] = (string) $object->Key;
                 $files[$filecounter]['filename'] = basename($object->Key);
                 $files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . (string) $object->Key . '&jobid=' . $job_object->job['jobid'];
                 $files[$filecounter]['filesize'] = (int) $object->Size;
                 $files[$filecounter]['time'] = $changetime;
                 $filecounter++;
             }
         }
         if ($job_object->job['s3maxbackups'] > 0 && is_object($s3)) {
             //Delete old backups
             if (count($backupfilelist) > $job_object->job['s3maxbackups']) {
                 ksort($backupfilelist);
                 $numdeltefiles = 0;
                 while ($file = array_shift($backupfilelist)) {
                     if (count($backupfilelist) < $job_object->job['s3maxbackups']) {
                         break;
                     }
                     //delete files on S3
                     $delete_s3 = $s3->delete_object($job_object->job['s3bucket'], $job_object->job['s3dir'] . $file);
                     if ($delete_s3) {
                         foreach ($files as $key => $filedata) {
                             if ($filedata['file'] == $job_object->job['s3dir'] . $file) {
                                 unset($files[$key]);
                             }
                         }
                         $numdeltefiles++;
                     } else {
                         $job_object->log(sprintf(__('Cannot delete backup from %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $file), E_USER_ERROR);
                     }
                 }
//......... part of the code omitted here .........
Author: agileontheweb, Project: xvp, Lines: 101, Source: class-destination-s3-v1.php

Example 8: htmlentities

';
echo '<br><br></div>';
// Welcome text.
$up_path = '/';
if ($settings['manage_all_files'] == '1') {
    $manage_all_link = ' <a href="' . pb_backupbuddy::ajax_url('remoteClient') . '&destination_id=' . htmlentities(pb_backupbuddy::_GET('destination_id')) . '&remote_path=' . $up_path . '" style="text-decoration: none; margin-left: 15px;" title="By default, Stash will display files in the Stash directory for this particular site. Clicking this will display files for all your sites in Stash.">List files for all sites</a>';
} else {
    $manage_all_link = '<!-- manage all disabled based on settings -->';
    if ($remote_path == '/') {
        die('Access denied. Possible hacking attempt has been logged. Error #5549450.');
    }
}
$reauth_link = ' <a href="' . pb_backupbuddy::ajax_url('remoteClient') . '&destination_id=' . htmlentities(pb_backupbuddy::_GET('destination_id')) . '&force_stash_reauth=1" style="text-decoration: none; margin-left: 15px;" title="Re-authenticate to Stash or change the Stash account this Stash destination uses.">Re-authenticate</a>';
echo '<div style="font-size: 12px; text-align: center;"><b>Current Remote Directory</b>: ' . $remote_path . $manage_all_link . $reauth_link . '</div>';
// Get file listing.
$response = $s3->list_objects($manage_data['bucket'], array('prefix' => $manage_data['subkey'] . $remote_path));
// list all the files in the subscriber account
/*
echo '<pre>';
print_r( $response );
echo '</pre>';
*/
// Display prefix somewhere to aid in troubleshooting/support.
$subscriber_prefix = substr($response->body->Prefix, 0, strpos($response->body->Prefix, '/'));
// Get list of files.
$backup_list_temp = array();
foreach ($response->body->Contents as $object) {
    $file = str_ireplace($manage_data['subkey'] . $remote_path, '', $object->Key);
    $last_modified = strtotime($object->LastModified);
    $size = (double) $object->Size;
    if (substr($file, 0, 3) == 'db/') {
Author: AgilData, Project: WordPress-Skeleton, Lines: 31, Source: _manage.php

Example 9:

#!/usr/bin/php
<?php 
/*
 * list_bucket_objects_raw.php
 *
 * Display the raw bucket data returned by list_objects
 *
 * Copyright 2009-2010 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You
 * may not use this file except in compliance with the License. A copy
 * of the License is located at
 *
 *       http://aws.amazon.com/apache2.0/
 *
 * or in the "license.txt" file accompanying this file. This file is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
 * OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the
 * License.
 */
error_reporting(E_ALL);
require_once 'sdk.class.php';
require_once 'include/book.inc.php';
// Create the S3 access object
$s3 = new AmazonS3();
// List the bucket
$res = $s3->list_objects(BOOK_BUCKET);
// Display the resulting object tree
print_r($res);
Author: websider, Project: amazon-web-services, Lines: 31, Source: list_bucket_objects_raw.php

Example 10: dest_s3

function dest_s3()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup file to Amazon S3...', 'backwpup'), $WORKING['DEST_S3']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $s3 = new AmazonS3(array('key' => $STATIC['JOB']['awsAccessKey'], 'secret' => $STATIC['JOB']['awsSecretKey'], 'certificate_authority' => true));
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            trigger_error(sprintf(__('Connected to S3 Bucket: %s', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_NOTICE);
            //Transfer Backup to S3
            if ($STATIC['JOB']['awsrrs']) {
                //set reduced redundancy or not
                $storage = AmazonS3::STORAGE_REDUCED;
            } else {
                $storage = AmazonS3::STORAGE_STANDARD;
            }
            //set curl Progress bar
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Upload to Amazon S3 now started... ', 'backwpup'), E_USER_NOTICE);
            //transfer file to S3
            $result = $s3->create_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), $result["header"]["_info"]["url"]), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloads3&file=' . $STATIC['JOB']['awsdir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
                $WORKING['STEPSDONE'][] = 'DEST_S3';
                //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('S3 Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            if ($STATIC['JOB']['awsmaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $s3->list_objects($STATIC['JOB']['awsBucket'], array('prefix' => $STATIC['JOB']['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['awsmaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($s3->delete_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $backupfilelist[$i])) {
                            //delete files on S3
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Can not delete backup on S3://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['awsdir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on S3 Bucket', '%d files deleted on S3 Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
Author: hscale, Project: webento, Lines: 80, Source: dest_s3.php

Example 11: dest_gstorage

function dest_gstorage()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup to Google Storage...', 'backwpup'), $WORKING['DEST_GSTORAGE']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $gstorage = new AmazonS3(array('key' => $STATIC['JOB']['GStorageAccessKey'], 'secret' => $STATIC['JOB']['GStorageSecret'], 'certificate_authority' => true));
        //set up s3 for google
        $gstorage->set_hostname('storage.googleapis.com');
        $gstorage->allow_hostname_override(false);
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            trigger_error(sprintf(__('Connected to GStorage Bucket: %s', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_NOTICE);
            //set curl progress bar
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Upload to GStorage now started... ', 'backwpup'), E_USER_NOTICE);
            //transfer file to GStorage
            $result = $gstorage->create_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => 'private', 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile']), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'];
                $WORKING['STEPSDONE'][] = 'DEST_GSTORAGE';
                //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to GStorage! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('GStorage Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            if ($STATIC['JOB']['GStoragemaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $gstorage->list_objects($STATIC['JOB']['GStorageBucket'], array('prefix' => $STATIC['JOB']['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['GStoragemaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($gstorage->delete_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i])) {
                            //delete files on GStorage
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Can not delete backup on GStorage://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on GStorage Bucket', '%d files deleted on GStorage Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
Author: hscale, Project: webento, Lines: 76, Source: dest_gstorage.php
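
Examples 7 and 11 also show that the same AmazonS3 client can be pointed at any S3-compatible endpoint by overriding the hostname. Isolated from the job plumbing, the essential calls look like this (endpoint, credentials, and bucket are placeholders):

     $client = new AmazonS3(array('key' => 'YOUR_KEY', 'secret' => 'YOUR_SECRET'));
     $client->set_hostname('storage.googleapis.com'); // or another S3-compatible endpoint
     $client->allow_hostname_override(false);         // keep the configured hostname as-is, as in Example 11
     $res = $client->list_objects('my-bucket', array('prefix' => 'backup/'));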

Example 12: die

}
if (false !== $stashDestination) {
    require_once pb_backupbuddy::plugin_path() . '/destinations/stash/lib/class.itx_helper.php';
    require_once pb_backupbuddy::plugin_path() . '/destinations/stash/init.php';
    //$stash = new ITXAPI_Helper( pb_backupbuddy_destination_stash::ITXAPI_KEY, pb_backupbuddy_destination_stash::ITXAPI_URL, pb_backupbuddy::$options['remote_destinations'][ $stashDestination ]['itxapi_username'], pb_backupbuddy::$options['remote_destinations'][ $stashDestination ]['itxapi_password'] );
    $manage_data = pb_backupbuddy_destination_stash::get_manage_data(pb_backupbuddy::$options['remote_destinations'][$stashDestination]);
    // Connect to S3.
    if (!is_array($manage_data['credentials'])) {
        die('Error #8484383c: Your authentication credentials for Stash failed. Verify your login and password to Stash. You may need to update the Stash destination settings. Perhaps you recently changed your password?');
    }
    $s3 = new AmazonS3($manage_data['credentials']);
    // the key, secret, token
    if (pb_backupbuddy::$options['remote_destinations'][$stashDestination]['ssl'] == '0') {
        @$s3->disable_ssl(true);
    }
    $response = $s3->list_objects($manage_data['bucket'], array('prefix' => $manage_data['subkey'] . '/deploy'));
    // list all the files in the subscriber account
    echo '<pre>';
    print_r($response);
    echo '</pre>';
    foreach ($response->body->Contents as $object) {
        print_r($object);
        echo '<br><br>';
        echo 'Bucket: ' . $manage_data['bucket'] . '<br>';
        echo 'Key: ' . $object->Key . '<br>';
        $metadata = $s3->get_object_metadata($manage_data['bucket'], $object->Key);
        //$metadata = $s3->get_object_metadata( "storage-api-ithemes", "y3xw057s35zp6s4i/deploy-backupbuddy.dat" );
        if (false === $metadata) {
            echo 'Meta result was FALSE.';
            print_r($metadata);
        } else {
Author: Ezyva2015, Project: SMSF-Academy-Wordpress, Lines: 31, Source: _deployments.php

Example 13: backwpup_get_backup_files

function backwpup_get_backup_files($jobid, $dest)
{
    global $backwpup_message;
    if (empty($jobid) or !in_array(strtoupper($dest), explode(',', strtoupper(BACKWPUP_DESTS))) and $dest != 'FOLDER') {
        return false;
    }
    $jobs = get_option('backwpup_jobs');
    //Load jobs
    $jobvalue = $jobs[$jobid];
    $filecounter = 0;
    $files = array();
    //Get files/file info in backup folder
    if ($dest == 'FOLDER' and !empty($jobvalue['backupdir']) and is_dir($jobvalue['backupdir'])) {
        if ($dir = opendir($jobvalue['backupdir'])) {
            while (($file = readdir($dir)) !== false) {
                if (substr($file, 0, 1) == '.') {
                    continue;
                }
                if (is_file($jobvalue['backupdir'] . $file)) {
                    $files[$filecounter]['JOBID'] = $jobid;
                    $files[$filecounter]['DEST'] = $dest;
                    $files[$filecounter]['folder'] = $jobvalue['backupdir'];
                    $files[$filecounter]['file'] = $jobvalue['backupdir'] . $file;
                    $files[$filecounter]['filename'] = $file;
                    $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=download&file=' . $jobvalue['backupdir'] . $file;
                    $files[$filecounter]['filesize'] = filesize($jobvalue['backupdir'] . $file);
                    $files[$filecounter]['time'] = filemtime($jobvalue['backupdir'] . $file);
                    $filecounter++;
                }
            }
            closedir($dir);
        }
    }
    //Get files/file info from Dropbox
    if ($dest == 'DROPBOX' and !empty($jobvalue['dropetoken']) and !empty($jobvalue['dropesecret'])) {
        require_once realpath(dirname(__FILE__) . '/../libs/dropbox.php');
        try {
            $dropbox = new backwpup_Dropbox('dropbox');
            $dropbox->setOAuthTokens($jobvalue['dropetoken'], $jobvalue['dropesecret']);
            $contents = $dropbox->metadata($jobvalue['dropedir']);
            if (is_array($contents)) {
                foreach ($contents['contents'] as $object) {
                    if ($object['is_dir'] != true) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://api-content.dropbox.com/1/files/" . $jobvalue['droperoot'] . "/" . dirname($object['path']) . "/";
                        $files[$filecounter]['file'] = $object['path'];
                        $files[$filecounter]['filename'] = basename($object['path']);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloaddropbox&file=' . $object['path'] . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $object['bytes'];
                        $files[$filecounter]['time'] = strtotime($object['modified']);
                        $filecounter++;
                    }
                }
            }
        } catch (Exception $e) {
            $backwpup_message .= 'DROPBOX: ' . $e->getMessage() . '<br />';
        }
    }
    //Get files/file info from Sugarsync
    if ($dest == 'SUGARSYNC' and !empty($jobvalue['sugarrefreshtoken'])) {
        if (!class_exists('SugarSync')) {
            require_once dirname(__FILE__) . '/../libs/sugarsync.php';
        }
        if (class_exists('SugarSync')) {
            try {
                $sugarsync = new SugarSync($jobvalue['sugarrefreshtoken']);
                $dirid = $sugarsync->chdir($jobvalue['sugardir'], $jobvalue['sugarroot']);
                $user = $sugarsync->user();
                $dir = $sugarsync->showdir($dirid);
                $getfiles = $sugarsync->getcontents('file');
                if (is_object($getfiles)) {
                    foreach ($getfiles->file as $getfile) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = 'https://' . $user->nickname . '.sugarsync.com/' . $dir;
                        $files[$filecounter]['file'] = (string) $getfile->ref;
                        $files[$filecounter]['filename'] = utf8_decode((string) $getfile->displayName);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadsugarsync&file=' . (string) $getfile->ref . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = (int) $getfile->size;
                        $files[$filecounter]['time'] = strtotime((string) $getfile->lastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'SUGARSYNC: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/file info from S3
    if ($dest == 'S3' and !empty($jobvalue['awsAccessKey']) and !empty($jobvalue['awsSecretKey']) and !empty($jobvalue['awsBucket'])) {
        if (!class_exists('AmazonS3')) {
            require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
        }
        if (class_exists('AmazonS3')) {
            try {
                $s3 = new AmazonS3(array('key' => $jobvalue['awsAccessKey'], 'secret' => $jobvalue['awsSecretKey'], 'certificate_authority' => true));
                if (($contents = $s3->list_objects($jobvalue['awsBucket'], array('prefix' => $jobvalue['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
//......... part of the code omitted here .........
Author: hscale, Project: webento, Lines: 101, Source: func_backwpupbackups.php

Example 14: exit

 *
 * Copyright 2009-2010 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You
 * may not use this file except in compliance with the License. A copy
 * of the License is located at
 *
 *       http://aws.amazon.com/apache2.0/
 *
 * or in the "license.txt" file accompanying this file. This file is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
 * OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the
 * License.
 *
 * Modified by Jeffrey S. Haemer <jeffrey.haemer@gmail.com>
 */
error_reporting(E_ALL);
require_once 'AWSSDKforPHP/sdk.class.php';
require_once 'include/book.inc.php';
if ($argc != 2) {
    exit("Usage: " . $argv[0] . " bucket_name\n");
}
$bucket = $argv[1] == '-' ? BOOK_BUCKET : $argv[1];
// Create the S3 access object
$s3 = new AmazonS3();
// List the bucket
$res = $s3->list_objects($bucket);
// Display the resulting object tree
print_r($res);
Author: jsh, Project: BarrBookCode, Lines: 31, Source: list_bucket_objects_raw.php


Note: The AmazonS3::list_objects method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. Please consult each project's License before redistributing or reusing the code; do not republish without permission.