

PHP AmazonS3::create_object Method Code Examples

This article collects typical usage examples of the PHP AmazonS3::create_object method from open-source code. If you are wondering what AmazonS3::create_object does, how to call it, or what real-world usage looks like, the hand-picked examples below should help. You can also explore further usage examples of the AmazonS3 class that this method belongs to.


The following presents 15 code examples of AmazonS3::create_object, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better PHP code examples.
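
All 15 examples share the same basic pattern from the AWS SDK for PHP 1.x: construct an AmazonS3 client, call create_object() with a bucket name, an object key, and an options array (either 'fileUpload' pointing at a local file or 'body' with raw content, plus optional 'acl', 'contentType', 'length', and 'storage'), then check the returned CFResponse with isOK(). The minimal sketch below condenses that pattern; the credentials, bucket name, key, and file path are placeholders, not values taken from any of the examples.

// Minimal sketch of the shared call pattern (AWS SDK for PHP 1.x); all names are placeholders.
require_once 'sdk.class.php';
$s3 = new AmazonS3(array('key' => 'YOUR_ACCESS_KEY', 'secret' => 'YOUR_SECRET_KEY'));
$response = $s3->create_object('example-bucket', 'path/to/object.jpg', array(
    'fileUpload'  => '/tmp/local-file.jpg',   // or: 'body' => $rawBytes
    'contentType' => 'image/jpeg',
    'acl'         => AmazonS3::ACL_PUBLIC,
));
if ($response->isOK()) {
    // public URL of the newly created object
    echo $s3->get_object_url('example-bucket', 'path/to/object.jpg');
}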

Example 1: store

 /**
  * {@inheritdoc}
  */
 public function store(BinaryInterface $binary, $path, $filter)
 {
     $objectPath = $this->getObjectPath($path, $filter);
     $storageResponse = $this->storage->create_object($this->bucket, $objectPath, array('body' => $binary->getContent(), 'contentType' => $binary->getMimeType(), 'length' => strlen($binary->getContent()), 'acl' => $this->acl));
     if (!$storageResponse->isOK()) {
         $this->logError('The object could not be created on Amazon S3.', array('objectPath' => $objectPath, 'filter' => $filter, 's3_response' => $storageResponse));
         throw new NotStorableException('The object could not be created on Amazon S3.');
     }
 }
Developer: umanit, Project: LiipImagineBundle, Lines: 12, Source: AmazonS3Resolver.php

Example 2: upload

 public function upload($remotePath, $localPath, $cnt = 0)
 {
     $response = $this->_s3->create_object($this->_bucket, $remotePath, array('fileUpload' => $localPath));
     $isOK = $response->isOK();
     if (!$isOK) {
         Api_Core_Application::log("Проблема с загрузкой файла " . $localPath, array('upload', array('header' => $response->header, 'status' => $response->status, 'body' => $response->body)), Api_Component_Log_Logger::LEVEL_ERROR);
         if ($cnt > self::TRY_UPLOAD_CNT) {
             return false;
         }
         sleep(5);
         $cnt++;
         return $this->upload($remotePath, $localPath, $cnt);
     }
     Api_Core_Application::log("Архив {$localPath} отправлен в хранилище");
     return $isOK;
 }
Developer: otis22, Project: reserve-copy-system, Lines: 16, Source: Amazon.php
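
A hypothetical call site for the method above might look like this; the constructor arguments, remote key, and local path are made up for illustration:

// Hypothetical usage: upload() retries up to TRY_UPLOAD_CNT times, sleeping 5 seconds
// between attempts, and logs both failures and the final success.
$amazon = new Amazon(/* bucket and S3 client configured elsewhere */);
$ok = $amazon->upload('backups/site-backup.tar.gz', '/tmp/site-backup.tar.gz');
if (!$ok) {
    // every attempt failed; details were already written to the application log
}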

Example 3: store

 /**
  * Store an uploaded file to S3.
  * @param CUploadedFile $uploadedFile
  * @param string $bucket The bucket in which to create the object
  * @return string URL to the file.
  */
 public function store($uploadedFile, $bucket = NULL)
 {
     if ($this->config['randomPath']) {
         $filePath = $this->config['pathPrefix'] . md5(date('His')) . '/' . $uploadedFile->getName();
     } else {
         $filePath = $this->config['pathPrefix'] . $uploadedFile->getName();
     }
     if ($bucket === NULL) {
         $bucket = $this->config['defaultBucket'];
     }
     /** @var CFResponse $result */
     $result = $this->s3->create_object($bucket, $filePath, array('fileUpload' => $uploadedFile->getTempName(), 'acl' => $this->config['defaultACL']));
     if ($result->isOk()) {
         return urldecode($this->s3->get_object_url($bucket, $filePath));
     } else {
         Yii::log("STATUS:" . $result->status . "\nHEDAER:" . $result->header . "\nBODY:" . $result->body, CLogger::LEVEL_ERROR, "application");
         throw new CException($result->status);
     }
 }
Developer: jamesmoey, Project: yii-amazon, Lines: 25, Source: AmazonS3Component.php

Example 4: createDirectory

 /**
  * create a directory
  *
  * @param   string  $filePath
  *
  * @return  CFResponse|null  null if the directory already exists
  */
 protected function createDirectory($filePath)
 {
      // bail out if the directory already exists
     if ($this->isDir($this->metadata($filePath))) {
         $this->debug(sprintf(self::MSG_DIR_NOTCREATED_EXISTS, $filePath));
         return;
     }
     // properties that make up a directory
     $acl = AmazonS3::ACL_PUBLIC;
     $body = null;
     $contentType = 'binary/octet-stream';
     // create directory
     $response = $this->s3->create_object($this->bucket, sprintf('%s/', $filePath), compact('acl', 'body', 'contentType'));
     return $response;
 }
Developer: lucasmilin, Project: filemanager-php-s3-plugin, Lines: 22, Source: filemanager.s3.class.php

Example 5: uploadSingle

 /**
  * Upload a single item to S3
  * 
  * @param array $file The PHP FILE array for the file
  * @param string $target The relative path in the bucket in which to place the file
  * @param array $options An array of options for uploading to S3
  * @return bool|string
  */
 public function uploadSingle($file, $target, array $options = array())
 {
     $options = array_merge(array('acl' => AmazonS3::ACL_PUBLIC), $options);
     if (is_array($file)) {
         $filename = basename($file['name']);
         $file = $file['tmp_name'];
     } else {
         $filename = basename($file);
     }
     $options['fileUpload'] = $file;
     $response = $this->s3->create_object($this->bucket, $target . $filename, $options);
     if ($response->status != 200) {
         return false;
     }
     return $this->s3->get_object_url($this->bucket, $target . $filename);
 }
Developer: ChrstnMgcn, Project: revolution, Lines: 24, Source: modaws.class.php
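
A hypothetical call, for instance from an upload handler, could look like the following; the instance name, form field, and key prefix are illustrative only:

// Hypothetical usage: pass either a PHP $_FILES entry or a local file path,
// plus the key prefix under which the object should be stored.
$url = $modaws->uploadSingle($_FILES['attachment'], 'assets/uploads/');
if ($url === false) {
    // S3 returned a non-200 status; handle the failure
}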

Example 6: uploadObjectsToContainer

 /**
  * Upload files to S3
  * 
  * @param string $container
  * @param array $objects
  * @return bool
  */
 public function uploadObjectsToContainer($container, array $objects = array())
 {
     if ($container == '/' || $container == '.') {
         $container = '';
     }
     $allowedFileTypes = explode(',', $this->xpdo->getOption('upload_files', null, ''));
     $allowedFileTypes = array_merge(explode(',', $this->xpdo->getOption('upload_images')), explode(',', $this->xpdo->getOption('upload_media')), explode(',', $this->xpdo->getOption('upload_flash')), $allowedFileTypes);
     $allowedFileTypes = array_unique($allowedFileTypes);
     $maxFileSize = $this->xpdo->getOption('upload_maxsize', null, 1048576);
     /* loop through each file and upload */
     foreach ($objects as $file) {
         if ($file['error'] != 0) {
             continue;
         }
         if (empty($file['name'])) {
             continue;
         }
         $ext = @pathinfo($file['name'], PATHINFO_EXTENSION);
         $ext = strtolower($ext);
         if (empty($ext) || !in_array($ext, $allowedFileTypes)) {
             $this->addError('path', $this->xpdo->lexicon('file_err_ext_not_allowed', array('ext' => $ext)));
             continue;
         }
         $size = @filesize($file['tmp_name']);
         if ($size > $maxFileSize) {
             $this->addError('path', $this->xpdo->lexicon('file_err_too_large', array('size' => $size, 'allowed' => $maxFileSize)));
             continue;
         }
         $newPath = $container . $file['name'];
         $contentType = $this->getContentType($ext);
         $uploaded = $this->driver->create_object($this->bucket, $newPath, array('fileUpload' => $file['tmp_name'], 'acl' => AmazonS3::ACL_PUBLIC, 'length' => $size, 'contentType' => $contentType));
         if (!$uploaded) {
             $this->addError('path', $this->xpdo->lexicon('file_err_upload'));
         }
     }
     /* invoke event */
     $this->xpdo->invokeEvent('OnFileManagerUpload', array('files' => &$objects, 'directory' => $container, 'source' => &$this));
     $this->xpdo->logManagerAction('file_upload', '', $container);
     return !$this->hasErrors();
 }
Developer: nervlin4444, Project: modx-cms, Lines: 47, Source: mods3mediasource.class.php

Example 7: transer_dir


//......... part of the code omitted here .........
         // Instantiate the AmazonS3 class
         $S3 = new AmazonS3(array('key' => trim($_POST['s3']['key']), 'secret' => trim($_POST['s3']['secret_key'])));
         $S3->ssl_verification = FALSE;
         // Init Configs
         $temp = $this->EE->config->item('ci_s3_storage');
         $s3_storage = constant('AmazonS3::' . $temp[$_POST['s3']['storage']]);
         $temp = $this->EE->config->item('ci_s3_acl');
         $s3_acl = constant('AmazonS3::' . $temp[$_POST['s3']['acl']]);
         $s3_directory = trim($_POST['s3']['directory']);
         $s3_bucket = $_POST['s3']['bucket'];
         $s3_subdir = '';
         if ($s3_directory) {
             $s3_subdir = $s3_directory . '/';
         }
         $s3_headers = $this->EE->config->item('ci_s3_headers');
         // Test it
         $resp = $S3->get_bucket_headers($s3_bucket);
         if (!$resp->isOK()) {
             if (isset($resp->body->Message)) {
                 exit('ERROR_S3: ' . $resp->body->Message);
             } else {
                 exit('ERROR_S3: Bucket error');
             }
         }
     } else {
         // Include the SDK
         if (class_exists('CF_Authentication') == FALSE) {
             require_once PATH_THIRD . 'channel_images/locations/cloudfiles/sdk/cloudfiles.php';
         }
         // Which Region?
         if ($_POST['cloudfiles']['region'] == 'uk') {
             $_POST['cloudfiles']['region'] = constant('UK_AUTHURL');
         } else {
             $_POST['cloudfiles']['region'] = constant('US_AUTHURL');
         }
         // Instantiate the Cloudfiles class
         $CF_AUTH = new CF_Authentication($_POST['cloudfiles']['username'], $_POST['cloudfiles']['api'], NULL, $_POST['cloudfiles']['region']);
         try {
             $CF_AUTH->ssl_use_cabundle();
             $CF_AUTH->authenticate();
         } catch (AuthenticationException $e) {
             exit('ERROR_CLOUDFILES:' . $e->getMessage());
         }
         $CF_CONN = new CF_Connection($CF_AUTH);
         $CF_CONN->ssl_use_cabundle();
         $CF_CONT = $CF_CONN->get_container($_POST['cloudfiles']['container']);
     }
     // -----------------------------------------
      // Loop over all files in the temp directory
     // -----------------------------------------
     $files = scandir($temp_dir);
     foreach ($files as $file) {
         $full_path = $temp_dir . $file;
         if (is_file($full_path) == false) {
             continue;
         }
         $extension = substr(strrchr($file, '.'), 1);
         // Mime type
         if ($extension == 'jpg') {
             $filemime = 'image/jpeg';
         } elseif ($extension == 'jpeg') {
             $filemime = 'image/jpeg';
         } elseif ($extension == 'png') {
             $filemime = 'image/png';
         } elseif ($extension == 'gif') {
             $filemime = 'image/gif';
         } else {
             continue;
         }
         if (isset($S3) == true) {
             $upload_arr = array();
             $upload_arr['fileUpload'] = $full_path;
             $upload_arr['contentType'] = $filemime;
             $upload_arr['acl'] = $s3_acl;
             $upload_arr['storage'] = $s3_storage;
             $upload_arr['headers'] = array();
             if ($s3_headers != FALSE && is_array($s3_headers) === TRUE) {
                 $upload_arr['headers'] = $s3_headers;
             }
             $response = $S3->create_object($s3_bucket, $s3_subdir . $entry_id . '/' . $file, $upload_arr);
             // Success?
             if (!$response->isOK()) {
                 exit((string) $response->body->Message);
             }
         } else {
             $OBJECT = $CF_CONT->create_object($entry_id . '/' . $file);
             $OBJECT->content_type = $filemime;
             try {
                 $OBJECT->load_from_filename($full_path);
             } catch (Exception $e) {
                 exit($e->getMessage());
             }
         }
         //@unlink($temp_dir.$file);
     }
     @delete_files($temp_dir, true);
     @rmdir($temp_dir);
     $o = array('success' => 'yes');
     exit($this->EE->image_helper->generate_json($o));
 }
Developer: ayuinc, Project: laboratoria-v2, Lines: 101, Source: ajax.channel_images.php

Example 8: basename

        ?>
		<?php 
        $tempData = $db->Raw("SELECT `md5`,`filesize`,`fileformat`,`playtime`,`sample_rate`,`location` FROM `userdb_temporary` WHERE `user`='{$user}'");
        ?>
		<?php 
        $filesize = $tempData[0]['filesize'];
        $sample_rate = $tempData[0]['sample_rate'];
        $fileformat = $tempData[0]['fileformat'];
        $md5 = $tempData[0]['md5'];
        $playtime = $tempData[0]['playtime'];
        ?>

		<?php 
        include 'include/aws/sdk.class.php';
        $s3 = new AmazonS3();
        $s3->create_object('fb-music', basename($tempData[0]['location']), array('fileUpload' => $tempData[0]['location'], 'acl' => AmazonS3::ACL_AUTH_READ, 'storage' => AmazonS3::STORAGE_REDUCED));
        /*
              $selDrive = $db->Raw("SELECT `data` FROM `system` WHERE `var`='drive'");
        $userFolder = array_sum(str_split($user));
        
        if(!file_exists('users/' . $selDrive[0]['data'] . '/' . $userFolder . '/'))
        	mkdir('users/' . $selDrive[0]['data'] . '/' . $userFolder . '/');
        rename($tempData[0]['location'], 'users/' . $selDrive[0]['data'] . '/' . $userFolder . '/' . basename($tempData[0]['location']) . '');
        */
        $db->Raw("DELETE FROM `userdb_temporary` WHERE `user`='{$user}' LIMIT 1");
        unlink($tempData[0]['location']);
        /*
        $link = '' . $config['server']['streaming'] . '/stream/' . $selDrive[0]['data'] . '/' . $userFolder . '/' . basename($tempData[0]['location']) . '';
        $drive = $selDrive[0]['data'];
        */
        $link = basename($tempData[0]['location']);
Developer: sjlu, Project: fb-music-app, Lines: 31, Source: app.upload.php

Example 9: dest_gstorage

function dest_gstorage()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup to Google Storage...', 'backwpup'), $WORKING['DEST_GSTORAGE']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $gstorage = new AmazonS3(array('key' => $STATIC['JOB']['GStorageAccessKey'], 'secret' => $STATIC['JOB']['GStorageSecret'], 'certificate_authority' => true));
        //set up s3 for google
        $gstorage->set_hostname('storage.googleapis.com');
        $gstorage->allow_hostname_override(false);
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            trigger_error(sprintf(__('Connected to GStorage Bucket: %s', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_NOTICE);
            //set curl progress bar
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Upload to GStorage now started... ', 'backwpup'), E_USER_NOTICE);
            //transfer file to GStorage
            $result = $gstorage->create_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => 'private', 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile']), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'];
                $WORKING['STEPSDONE'][] = 'DEST_GSTORAGE';
                //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to GStorage! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('GStorage Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            if ($STATIC['JOB']['GStoragemaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $gstorage->list_objects($STATIC['JOB']['GStorageBucket'], array('prefix' => $STATIC['JOB']['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['GStoragemaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($gstorage->delete_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i])) {
                            //delete files on GStorage
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Can not delete backup on GStorage://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on GStorage Bucket', '%d files deleted on GStorage Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
Developer: hscale, Project: webento, Lines: 76, Source: dest_gstorage.php
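
Example 9 is notable because it reuses the SDK's S3 client against Google Cloud Storage's XML API simply by overriding the endpoint hostname. Stripped of the BackWPup plumbing, the technique looks roughly like the sketch below; the credentials, bucket, and paths are placeholders, and it assumes GCS interoperability (HMAC) keys:

// Sketch only: point the AWS SDK 1.x S3 client at Google Cloud Storage.
$gs = new AmazonS3(array(
    'key'    => 'GCS_INTEROP_ACCESS_KEY',   // placeholder HMAC credentials
    'secret' => 'GCS_INTEROP_SECRET',
    'certificate_authority' => true,
));
$gs->set_hostname('storage.googleapis.com');
$gs->allow_hostname_override(false);
$response = $gs->create_object('example-bucket', 'backups/backup.zip', array(
    'fileUpload' => '/tmp/backup.zip',
    'acl'        => 'private',
));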

Example 10: saveFileToApplicationBucket

 public function saveFileToApplicationBucket($file_location, $filename, $prefix, $permissions = null)
 {
     $permissions = is_null($permissions) ? AmazonS3::ACL_PRIVATE : $permissions;
     $location = $file_location . $filename;
     if (!file_exists($location)) {
         throw new Exception("No local file to upload!");
     }
     ProjectConfiguration::registerAws();
     $s3 = new AmazonS3();
     $bucket = ProjectConfiguration::getApplicationAmazonBucketName();
     if ($s3->if_bucket_exists($bucket)) {
         $s3->delete_object($bucket, $prefix . '/' . $filename);
         $response = $s3->create_object($bucket, $prefix . '/' . $filename, array('fileUpload' => $location, 'acl' => $permissions));
         if (!$response->isOK()) {
             throw new Exception("Error uploading file!");
         }
     } else {
         throw new Exception("Amazon bucket '{$bucket}' does not exist!");
     }
     return $response;
 }
Developer: nocoolnametom, Project: OpenMicNight, Lines: 21, Source: Subreddit.class.php

Example 11: createImgThumb

function createImgThumb($link, $conf)
{
    // get the file
    $hash = md5($link);
    $res = '';
    $filePathDestOriginal = $conf->originalpath() . $hash . '.jpg';
    $filePathDestThumb = $conf->thumbpath() . $hash . '.jpg';
    $filePathDestMedium = $conf->mediumpath() . $hash . '.jpg';
    $filePathDestBig = $conf->bigpath() . $hash . '.jpg';
    $ch = curl_init($link);
    curl_setopt($ch, CURLOPT_HEADER, 0);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($ch, CURLOPT_BINARYTRANSFER, 1);
    $rawdata = curl_exec($ch);
    curl_close($ch);
    if (file_exists($filePathDestOriginal)) {
        @unlink($filePathDestOriginal);
    }
    $fp = fopen($filePathDestOriginal, 'x');
    fwrite($fp, $rawdata);
    fclose($fp);
    // create thumb and full size
    if ($rawdata) {
        $res1 = redimg(array(0 => array('W' => 120, 'H' => 90)), $filePathDestThumb, $filePathDestOriginal, 0);
        $res2 = redimg(array(0 => array('W' => 256, 'H' => 0)), $filePathDestMedium, $filePathDestOriginal, 0);
        $res3 = redimg(array(0 => array('W' => 512, 'H' => 0)), $filePathDestBig, $filePathDestOriginal, 0);
        require_once "aws-sdk/sdk.class.php";
        $s3 = new AmazonS3();
        if (file_exists($filePathDestThumb)) {
            $response1 = $s3->create_object($conf->bucket(), '120_90/' . $hash . '.jpg', array('fileUpload' => $filePathDestThumb, 'contentType' => 'image/jpeg', 'acl' => AmazonS3::ACL_PUBLIC));
        }
        if (file_exists($filePathDestMedium)) {
            $response2 = $s3->create_object($conf->bucket(), '256_0/' . $hash . '.jpg', array('fileUpload' => $filePathDestMedium, 'contentType' => 'image/jpeg', 'acl' => AmazonS3::ACL_PUBLIC));
        }
        if (file_exists($filePathDestBig)) {
            $response3 = $s3->create_object($conf->bucket(), '512_0/' . $hash . '.jpg', array('fileUpload' => $filePathDestBig, 'contentType' => 'image/jpeg', 'acl' => AmazonS3::ACL_PUBLIC));
        }
        if ($res1 && $res2 && $res3) {
            $res = $hash . '.jpg';
        }
        // NOTE: your local server's clock must be in sync to send images to S3
        //var_dump($response1);
        //var_dump($response2);
        //var_dump($response3);
        if ($response1->status == 200 && $response2->status == 200 && $response3->status == 200) {
            unlink($filePathDestOriginal);
        }
        if ($response1->status == 200) {
            unlink($filePathDestThumb);
        }
        if ($response2->status == 200) {
            unlink($filePathDestMedium);
        }
        if ($response3->status == 200) {
            unlink($filePathDestBig);
        }
    } else {
        $res = '';
    }
    return $res;
}
Developer: rebe100x, Project: YAKREP, Lines: 61, Source: library.php

Example 12: cache_thumb

    public function cache_thumb($job, $user_id)
    {
        $keepsizes = array('thumb_medium.', 'thumb_large.');
        $apikey = DB::get()->val("SELECT value FROM options WHERE grouping = 'Thumbnails' AND name = 'Bluga API Key'");
        $username = DB::get()->val('SELECT username FROM users where id = ?', array($user_id));
        $statusrequest = <<<STATUSREQ
<webthumb>
\t<apikey>{$apikey}</apikey>
\t<status>
\t\t<job>{$job}</job>
\t</status>
</webthumb>
STATUSREQ;
        //header('Content-type: text/plain');
        //echo "{$statusrequest}\n";
        $xml = new SimpleXMLElement(self::execute('http://webthumb.bluga.net/api.php', 'POST', $statusrequest));
        //echo "$jobs\n";
        //echo $xml->asXML();
        $href = false;
        foreach ($xml->jobStatus->status as $status) {
            if ((string) $status == 'Complete') {
                $zipurl = $status['pickup'];
                $zipfiledata = self::execute($zipurl);
                $zipfile = tempnam(sys_get_temp_dir(), 'thm');
                file_put_contents($zipfile, $zipfiledata);
                if (file_exists($zipfile)) {
                    $zip = zip_open($zipfile);
                    $names = array();
                    while ($zip_entry = zip_read($zip)) {
                        $size = zip_entry_filesize($zip_entry);
                        $zdata = zip_entry_read($zip_entry, $size);
                        $zfile = zip_entry_name($zip_entry);
                        $keep = false;
                        foreach ($keepsizes as $size) {
                            if (strpos($zfile, $size) !== false) {
                                $keep = true;
                                break;
                            }
                        }
                        if (strpos($zfile, '-') === false) {
                            $keep = true;
                        }
                        if ($keep) {
                            $access = DB::get()->assoc("SELECT name, value FROM options WHERE grouping = 'Amazon Web Services'");
                            $bucketname = $access['S3 Bucket Name'];
                            $s3 = new AmazonS3($access['AWS Access Key ID'], $access['AWS Secret Access Key']);
                            $s3filename = strtolower(preg_replace('%\\W+%', '', $username)) . '/' . date('Ym') . '/webthumb_';
                            $s3filename .= basename($zfile);
                            $s3filename = trim($s3filename, '/');
                            $headers = get_headers($href, 1);
                            $opt = array('filename' => $s3filename, 'body' => $zdata, 'contentType' => 'image/png', 'acl' => S3_ACL_OPEN);
                            $s3->create_object($bucketname, $opt);
                            $href = "http://{$bucketname}.s3.amazonaws.com/{$s3filename}#{$username}:{$user_id}";
                        }
                    }
                    zip_close($zip);
                    unlink($zipfile);
                }
            }
        }
        return $href;
    }
Developer: amitchouhan004, Project: barchat, Lines: 62, Source: url.php

Example 13: send


//......... part of the code omitted here .........
             }
             // Get chunk parts for multipart transfer.
             pb_backupbuddy::status('details', 'Stash getting multipart counts.');
             $parts = $s3->get_multipart_counts($file_size, $max_chunk_size * 1024 * 1024);
             // Size of chunks expected to be in bytes.
             $multipart_destination_settings = $settings;
             $multipart_destination_settings['_multipart_id'] = $upload_id;
             $multipart_destination_settings['_multipart_partnumber'] = 0;
             $multipart_destination_settings['_multipart_file'] = $file;
             $multipart_destination_settings['_multipart_counts'] = $parts;
             $multipart_destination_settings['_multipart_upload_data'] = $upload_data;
             $multipart_destination_settings['_multipart_backup_type_dir'] = $backup_type_dir;
             pb_backupbuddy::status('details', 'Stash multipart settings to pass:' . print_r($multipart_destination_settings, true));
             unset($files[$file_id]);
             // Remove this file from queue of files to send as it is now passed off to be handled in multipart upload.
             // Schedule to process the parts.
             pb_backupbuddy::status('details', 'Stash scheduling send of next part(s).');
             wp_schedule_single_event(time(), pb_backupbuddy::cron_tag('destination_send'), array($multipart_destination_settings, $files, 'multipart', false));
             spawn_cron(time() + 150);
             // Adds > 60 seconds to get around once per minute cron running limit.
             update_option('_transient_doing_cron', 0);
             // Prevent cron-blocking for next item.
             pb_backupbuddy::status('details', 'Stash scheduled send of next part(s). Done for this cycle.');
             return array($upload_id, 'Starting send of ' . count($multipart_destination_settings['_multipart_counts']) . ' parts.');
         } else {
             if ($max_chunk_size != '0') {
                 pb_backupbuddy::status('details', 'File size of ' . $file_size / 1024 / 1024 . 'MB is less than the max chunk size of ' . $max_chunk_size . 'MB; not chunking into multipart upload.');
             } else {
                 pb_backupbuddy::status('details', 'Max chunk size set to zero so not chunking into multipart upload.');
             }
         }
         // SEND file.
         pb_backupbuddy::status('details', 'About to put (upload) object to Stash.');
         $response = $s3->create_object($upload_data['bucket'], $upload_data['object'], array('fileUpload' => $file, 'encryption' => 'AES256'));
         //  we can also utilize the multi-part-upload to create an object
         //  $response = $s3->create_mpu_object($upload_data['bucket'], $upload_data['object'], array('fileUpload'=>$upload_file));
         // Validate response. On failure notify Stash API that things went wrong.
         if (!$response->isOK()) {
             pb_backupbuddy::status('details', 'Sending upload abort.');
             $request = new RequestCore($abort_url);
             $response = $request->send_request(true);
             $this_error = 'Could not upload to Stash, attempt aborted.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             //	pb_backupbuddy::status( 'details', 'Stash file upload speed: ' . ( $response->header['_info']['speed_upload'] / 1024 / 1024 ) . 'MB/sec. This number may be invalid for small file transfers.' );
              pb_backupbuddy::status('details', 'Stash put success. Need to notify Stash of upload completion. Details: `' . print_r($response, true) . '`.');
         }
         delete_transient('pb_backupbuddy_stashquota_' . $settings['itxapi_username']);
         // Delete quota transient since it probably has changed now.
          // Notify Stash API that things were successful.
         $done_url = $stash->get_upload_url($file, 'done', $remote_path . $backup_type_dir . basename($file));
         pb_backupbuddy::status('details', 'Notifying Stash of completed upload with done url `' . $done_url . '`.');
         $request = new RequestCore($done_url);
         $response = $request->send_request(true);
         if (!$response->isOK()) {
             $this_error = 'Error #756834682. Could not finalize Stash upload. Response code: `' . $response->get_response_code() . '`; Response body: `' . $response->get_response_body() . '`; Response headers: `' . $response->get_response_header() . '`.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             // Good server response.
             // See if we got an optional json response.
             $upload_data = @json_decode($response->body, true);
             if (isset($upload_data['error'])) {
Developer: CherylMuniz, Project: fashion, Lines: 67, Source: init.php

Example 14: date

<?php

/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
header("Content-type:text/plain; charset:utf-8");
include_once 'sdk.class.php';
$s3 = new AmazonS3();
$bucketname = "empresasctmbucketpruebas";
//CREATE A BUCKET
//$response = $s3->create_bucket($bucketname, $s3::REGION_US_E1);
//print_r($response);
//LIST BUCKETS
//$response = $s3->get_bucket_list();
//print_r($response);
//UPLOAD
//$response = $s3->create_object($bucketname, "PRUEBA-".date('ljS-FYh:i:sA'),
//        array(
//          'body' => "EMPTY",
//            'contentType' => 'text/plain',
//            'acl' => $s3::ACL_PUBLIC
//        ));
//print_r($response);
$response = $s3->create_object($bucketname, "PRUEBA-" . date('ljS-FYh:i:sA') . ".png", array('fileUpload' => "/home/naito/Escritorio/Firsttets.png", 'acl' => $s3::ACL_PUBLIC));
print_r($response);
Developer: uiDeveloper116, Project: webstore, Lines: 26, Source: S3.php

Example 15: dest_s3

function dest_s3()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup file to Amazon S3...', 'backwpup'), $WORKING['DEST_S3']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $s3 = new AmazonS3(array('key' => $STATIC['JOB']['awsAccessKey'], 'secret' => $STATIC['JOB']['awsSecretKey'], 'certificate_authority' => true));
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            trigger_error(sprintf(__('Connected to S3 Bucket: %s', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_NOTICE);
            //Transfer Backup to S3
            if ($STATIC['JOB']['awsrrs']) {
                //set reduced redundancy or not
                $storage = AmazonS3::STORAGE_REDUCED;
            } else {
                $storage = AmazonS3::STORAGE_STANDARD;
            }
            //set curl Progress bar
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Upload to Amazon S3 now started... ', 'backwpup'), E_USER_NOTICE);
            //transfer file to S3
            $result = $s3->create_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), $result["header"]["_info"]["url"]), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloads3&file=' . $STATIC['JOB']['awsdir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
                $WORKING['STEPSDONE'][] = 'DEST_S3';
                //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('S3 Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            if ($STATIC['JOB']['awsmaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $s3->list_objects($STATIC['JOB']['awsBucket'], array('prefix' => $STATIC['JOB']['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['awsmaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($s3->delete_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $backupfilelist[$i])) {
                            //delete files on S3
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Can not delete backup on S3://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['awsdir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on S3 Bucket', '%d files deleted on S3 Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
Developer: hscale, Project: webento, Lines: 80, Source: dest_s3.php


Note: The AmazonS3::create_object examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. When redistributing or using the code, please follow the license of the corresponding project; do not republish without permission.