

PHP AmazonS3::create_mpu_object Method Code Examples

This article collects typical usage examples of the PHP AmazonS3::create_mpu_object method. If you are wondering what AmazonS3::create_mpu_object does, how to call it, or want to see it in real code, the curated examples below should help. You can also explore further usage examples of the AmazonS3 class this method belongs to.


The following presents 3 code examples of AmazonS3::create_mpu_object, ordered by popularity.
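Before the full examples, here is a minimal sketch of a create_mpu_object call (a multipart upload) against the AWS SDK for PHP 1.x. The bucket name, object key, local file path, and credentials are placeholders; the options shown are the ones that appear in the examples below:

require_once 'sdk.class.php'; // AWS SDK for PHP 1.x (path is a placeholder)

// Placeholder credentials; use your own access key pair.
$s3 = new AmazonS3(array(
    'key'    => 'YOUR_ACCESS_KEY',
    'secret' => 'YOUR_SECRET_KEY',
));

// Multipart upload: the SDK splits the file into parts of 'partSize' bytes.
$response = $s3->create_mpu_object('my-bucket', 'backups/site.tar.gz', array(
    'fileUpload' => '/path/to/site.tar.gz',  // local file to upload
    'partSize'   => 26214400,                // 25 MB per part, as in the examples below
    'acl'        => AmazonS3::ACL_PRIVATE,
    'storage'    => AmazonS3::STORAGE_STANDARD,
));

if ($response->isOK()) {
    echo "upload complete\n";
}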

Example 1: updateRemote

 public function updateRemote($myrole, $drivers)
 {
     if ($this->_options['update'] == 'simulate') {
         $simulate = true;
         $this->_out->logWarning("only SIMULATION mode");
     } else {
         if ($this->_options['update'] === false || (int) $this->_options['update'] === 0) {
             $this->_out->logNotice("skipped, not requested and not needed");
             return;
         }
         $simulate = false;
     }
     /** @var $compare Compare_Interface */
     $compare = $drivers['compare'];
     /** @var $local Storage_Interface */
     $local = $drivers['local'];
     if (!$compare->initChangesOn("remote")) {
         // TODO not sure, but maybe we will need it
     }
     $job = $this->_out->jobStart("updating remote storage");
     $this->_out->jobSetProgressStep($job, 1000);
     foreach ($compare as $task) {
         $repeat = 3;
         do {
             $msg = "";
             try {
                 $path = $this->_getPathWithBasedir($task->path, self::ADD_BASE_DIR);
                 switch ($task->action) {
                     case Compare_Interface::CMD_MKDIR:
                         $msg = "mkdir " . $path . " into s3 bucket";
                         $this->_out->logDebug($msg);
                         if (!$simulate) {
                             // create folders
                             $this->_s3->create_object($this->getBucket(), $path, array('body' => '', 'storage' => $this->_defaultRedundancyStorage));
                         }
                         break;
                     case Compare_Interface::CMD_PUT:
                         $msg = "put " . $path . " into s3 bucket";
                         $this->_out->logDebug($msg);
                         $uploadPath = $local->getBaseDir() . $task->path;
                         //fix for windows encoding issue
                         $uploadPath = $local->convertEncodingPath($uploadPath);
                         if (!file_exists($uploadPath)) {
                             $this->_out->logError("file {$uploadPath} does not exist locally anymore");
                             break; // file vanished locally; leave the switch and move on to the next task
                         }
                         if (!$simulate) {
                             // an empty directory: path ends with "/" (ASCII 47)
                             if (ord(substr($path, -1)) === 47) {
                                 // empty folders need slightly different options
                                 $this->_out->logWarning("TODO putting empty folder {$path} ... is it possible?");
                                 $this->_s3->create_object($this->getBucket(), $path, array('body' => '', 'storage' => $this->_defaultRedundancyStorage));
                             } else {
                                 $options = array('fileUpload' => $uploadPath, 'storage' => $this->_defaultRedundancyStorage);
                                 // TODO it should be possible to speed up the upload of many small files by using S3 batch requests
                                 if ($this->_options['multipart']['big-files']) {
                                     // multipart upload for big files
                                     if ($this->_options['multipart']['part-size']) {
                                         $options['partSize'] = $this->_options['multipart']['part-size'];
                                     }
                                     $this->_s3->create_mpu_object($this->getBucket(), $path, $options);
                                 } else {
                                     // normal upload
                                     $this->_s3->create_object($this->getBucket(), $path, $options);
                                 }
                             }
                         }
                         break;
                     case Compare_Interface::CMD_DELETE:
                         $msg = "deleting " . $path . " from s3 bucket";
                         $this->_out->logDebug($msg);
                         if (!$simulate) {
                             $this->_s3->delete_object($this->getBucket(), $path);
                         }
                         break;
                     case Compare_Interface::CMD_TS:
                         // storing this information as metadata is too slow to be used
                         // $this->_out->logDebug("remember local timestamp for " . $path . " into s3 bucket");
                         // if (!$simulate) {
                         //     $this->_s3->update_object(
                         //         $this->getBucket(), $path,
                         //         array(
                         //             'meta' => array('localts' => $task->ltime),
                         //         )
                         //     );
                         // }
                         break;
                     default:
                         $this->_out->logError("ignored command {$task->action}");
                 }
                 $repeat = 0;
             } catch (Exception $e) {
                 $repeat--;
                 if ($repeat) {
                     $this->_out->logError("need to repeat: {$msg}");
                 } else {
                     if ($msg) {
                         $this->_out->logError($msg);
                     }
                     throw new Exception($e->getMessage(), $e->getCode());
//......... some code omitted here .........
Developer: dannypenrose, Project: xtbackup, Lines: 101, Source: S3.php
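For reference, a minimal sketch of the configuration that updateRemote above appears to consume; the key names ('update', 'multipart', 'big-files', 'part-size') come from the code, while the values here are illustrative assumptions:

// Hypothetical options array for updateRemote(); values are illustrative.
$options = array(
    'update'    => 1,                 // false/0 = skip, 'simulate' = dry run, otherwise update
    'multipart' => array(
        'big-files' => true,          // route large files through create_mpu_object
        'part-size' => 26214400,      // optional multipart chunk size in bytes (25 MB)
    ),
);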

Example 2: dest_gstorage

function dest_gstorage()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup to Google Storage...', 'backwpup'), $WORKING['DEST_GSTORAGE']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $gstorage = new AmazonS3(array('key' => $STATIC['JOB']['GStorageAccessKey'], 'secret' => $STATIC['JOB']['GStorageSecret'], 'certificate_authority' => true));
        //set up s3 for google
        $gstorage->set_hostname('commondatastorage.googleapis.com');
        $gstorage->allow_hostname_override(false);
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            trigger_error(sprintf(__('Connected to GStorage Bucket: %s', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_NOTICE);
            // set up a cURL progress bar via CURLOPT_PROGRESSFUNCTION
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 256);
            }
            trigger_error(__('Upload to GStorage now started... ', 'backwpup'), E_USER_NOTICE);
            // transfer the file to GStorage
            $result = $gstorage->create_mpu_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => 'private', 'partSize' => 26214400, 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] = 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), "https://sandbox.google.com/storage/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile']), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = "https://sandbox.google.com/storage/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'];
                $WORKING['STEPSDONE'][] = 'DEST_GSTORAGE';
                //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to GStorage! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('GStorage Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            if ($STATIC['JOB']['GStoragemaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $gstorage->list_objects($STATIC['JOB']['GStorageBucket'], array('prefix' => $STATIC['JOB']['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['GStoragemaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($gstorage->delete_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i])) {
                            // delete the file on S3
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Can not delete backup on GStorage://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on GStorage Bucket', '%d files deleted on GStorage Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
Developer: rubyerme, Project: rubyerme.github.com, Lines: 76, Source: dest_gstorage.php
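Note that curl_progresscallback above is defined elsewhere in BackWPup and is not shown here. As a rough, hypothetical sketch (not the plugin's actual implementation), a progress callback wired through CURLOPT_PROGRESSFUNCTION could look like the following; the four-argument signature below is the pre-PHP-5.5 form, and since PHP 5.5 cURL passes the handle as an extra first argument:

// Hypothetical cURL progress callback; not BackWPup's actual curl_progresscallback.
function my_progresscallback($download_size, $downloaded, $upload_size, $uploaded)
{
    global $WORKING;
    if ($uploaded > 0) {
        $WORKING['STEPDONE'] = $uploaded; // bytes sent so far
    }
    return 0; // returning a non-zero value would abort the transfer
}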

Example 3: dest_s3

function dest_s3()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup file to Amazon S3...', 'backwpup'), $WORKING['DEST_S3']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $s3 = new AmazonS3(array('key' => $STATIC['JOB']['awsAccessKey'], 'secret' => $STATIC['JOB']['awsSecretKey'], 'certificate_authority' => true));
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            trigger_error(sprintf(__('Connected to S3 Bucket: %s', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_NOTICE);
            //Transfer Backup to S3
            if ($STATIC['JOB']['awsrrs']) {
                // choose reduced redundancy or standard storage
                $storage = AmazonS3::STORAGE_REDUCED;
            } else {
                $storage = AmazonS3::STORAGE_STANDARD;
            }
            // set up a cURL progress bar via CURLOPT_PROGRESSFUNCTION
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 256);
            }
            trigger_error(__('Upload to Amazon S3 now started... ', 'backwpup'), E_USER_NOTICE);
            // transfer the file to S3
            $result = $s3->create_mpu_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'partSize' => 26214400, 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] = 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), $result["header"]["_info"]["url"]), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloads3&file=' . $STATIC['JOB']['awsdir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
                $WORKING['STEPSDONE'][] = 'DEST_S3';
                //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('S3 Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            if ($STATIC['JOB']['awsmaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $s3->list_objects($STATIC['JOB']['awsBucket'], array('prefix' => $STATIC['JOB']['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['awsmaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($s3->delete_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $backupfilelist[$i])) {
                            // delete the file on S3
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Can not delete backup on S3://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['awsdir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on S3 Bucket', '%d files deleted on S3 Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
Developer: rubyerme, Project: rubyerme.github.com, Lines: 80, Source: dest_s3.php
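Both BackWPup examples cast the CFResponse object returned by create_mpu_object to an array and inspect the HTTP status by hand. With the AWS SDK for PHP 1.x it is usually simpler to ask the response object directly via its isOK() helper, roughly like this ($bucket, $key, and $options stand in for the values used above):

// Sketch: checking the upload result through CFResponse::isOK() (SDK 1.x)
// instead of casting the response to an array.
$result = $s3->create_mpu_object($bucket, $key, $options);
if ($result->isOK()) {
    // success: HTTP 2xx
} else {
    trigger_error('upload failed with HTTP status ' . $result->status, E_USER_ERROR);
}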


Note: The AmazonS3::create_mpu_object method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. Please consult each project's license before using or redistributing the code, and do not reproduce this article without permission.