

Python boto.connect_s3 Method Code Examples

This article collects typical usage examples of the boto.connect_s3 method in Python. If you have been wondering what exactly boto.connect_s3 does, how to call it, or what real-world uses of it look like, the curated code examples below should help. You can also explore the boto package, where this method lives, for further usage examples.


The following presents 15 code examples of the boto.connect_s3 method, drawn from open-source projects and ordered by popularity by default.
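
Before turning to the examples, here is a minimal sketch of the typical boto.connect_s3 round trip. It assumes credentials are available via environment variables or ~/.boto; the bucket and object names are hypothetical:

import boto
from boto.s3.key import Key

# Credentials are resolved from the environment or a boto config file.
conn = boto.connect_s3()
bucket = conn.get_bucket('example-bucket')  # hypothetical bucket name

# Write an object, then read it back.
key = Key(bucket, 'hello.txt')
key.set_contents_from_string('hello world')
print(key.get_contents_as_string())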

Example 1: upload_output_to_s3

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def upload_output_to_s3(job, job_vars):
    """
    If s3_dir is specified in the input arguments, the output file is uploaded to S3 using boto.
    WARNING: ~/.boto credentials are necessary for this to succeed!

    job_vars: tuple     Tuple of dictionaries: input_args and ids
    """
    import os

    import boto
    from boto.s3.key import Key

    input_args, ids = job_vars
    work_dir = job.fileStore.getLocalTempDir()
    uuid = input_args['uuid']
    # Parse s3_dir
    s3_dir = input_args['s3_dir']
    bucket_name = s3_dir.split('/')[0]
    bucket_dir = '/'.join(s3_dir.split('/')[1:])
    # I/O
    uuid_tar = return_input_paths(job, work_dir, ids, 'uuid.tar.gz')
    # Upload to S3 via boto
    conn = boto.connect_s3()
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    k.key = os.path.join(bucket_dir, uuid + '.tar.gz')
    k.set_contents_from_filename(uuid_tar) 
Author: BD2KGenomics, Project: toil-scripts, Lines: 27, Source: rnaseq_unc_tcga_versions.py
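
The WARNING above concerns boto's credential discovery: a bare connect_s3() call reads keys from environment variables or a boto config file such as ~/.boto. As a sketch, the same [Credentials] settings can also be supplied programmatically through boto.config (the key values below are placeholders, not real credentials):

import boto

# Programmatic equivalent of the [Credentials] section of ~/.boto;
# the values are placeholders.
if not boto.config.has_section('Credentials'):
    boto.config.add_section('Credentials')
boto.config.set('Credentials', 'aws_access_key_id', 'AKIA...')
boto.config.set('Credentials', 'aws_secret_access_key', 'placeholder-secret')

conn = boto.connect_s3()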

Example 2: upload_pitr_data

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def upload_pitr_data(self, db, tbl, pitr_data):
        """ Upload a file of PITR data to s3 for each table

        Args:
            db - the db that was backed up.
            tbl - the table that was backed up.
            pitr_data - a dict of various data that might be helpful for
                        running a PITR
        """
        zk = host_utils.MysqlZookeeper()
        replica_set = zk.get_replica_set_from_instance(self.instance)
        s3_path = PATH_PITR_DATA.format(replica_set=replica_set,
                                        date=self.datestamp,
                                        db_name=db, table=tbl)
        log.debug('{proc_id}: {db}.{tbl} Uploading pitr data to {s3_path}'
                  ''.format(s3_path=s3_path,
                            proc_id=multiprocessing.current_process().name,
                            db=db, tbl=tbl))
        boto_conn = boto.connect_s3()
        bucket = boto_conn.get_bucket(self.upload_bucket, validate=False)
        key = bucket.new_key(s3_path)
        key.set_contents_from_string(json.dumps(pitr_data)) 
Author: pinterest, Project: mysql_utils, Lines: 24, Source: mysql_backup_csv.py
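
The read-side counterpart of set_contents_from_string is shown below as a minimal sketch; the bucket and key names are illustrative, not the ones this project actually uses:

import json

import boto

conn = boto.connect_s3()
bucket = conn.get_bucket('example-backup-bucket', validate=False)  # illustrative
key = bucket.get_key('pitr/some_replica_set/20160101/db/tbl')      # illustrative path
pitr_data = json.loads(key.get_contents_as_string())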

Example 3: upload_schema

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def upload_schema(self, db, table, tmp_dir_db):
        """ Upload the schema of a table to s3

        Args:
            db - the db to be backed up
            table - the table to be backed up
            tmp_dir_db - temporary storage used for all tables in the db
        """
        (schema_path, _, _) = backup.get_csv_backup_paths(
                                    self.instance, db, table, self.datestamp)
        create_stm = mysql_lib.show_create_table(self.instance, db, table)
        log.debug('{proc_id}: Uploading schema to {schema_path}'
                  ''.format(schema_path=schema_path,
                            proc_id=multiprocessing.current_process().name))
        boto_conn = boto.connect_s3()
        bucket = boto_conn.get_bucket(self.upload_bucket, validate=False)
        key = bucket.new_key(schema_path)
        key.set_contents_from_string(create_stm) 
Author: pinterest, Project: mysql_utils, Lines: 20, Source: mysql_backup_csv.py

Example 4: already_backed_up

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def already_backed_up(self, table_tuple):
        """ Check to see if a particular partition has already been uploaded
            to s3

        Args:
            table_tuple - (table, partition name, part number)

        Returns:
            bool - True if the partition has already been backed up,
                   False otherwise
        """
        boto_conn = boto.connect_s3()
        bucket = boto_conn.get_bucket(self.upload_bucket, validate=False)
        (_, data_path, _) = backup.get_csv_backup_paths(
            self.instance, *table_tuple[0].split('.'),
            date=self.datestamp,
            partition_number=table_tuple[2])
        return bucket.get_key(data_path) is not None 
Author: pinterest, Project: mysql_utils, Lines: 22, Source: mysql_backup_csv.py
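
The check above works because get_key issues a HEAD request and returns None when the object is missing, while validate=False skips the extra bucket-existence request that get_bucket would otherwise make. A small sketch with illustrative names:

import boto

conn = boto.connect_s3()
bucket = conn.get_bucket('example-backup-bucket', validate=False)  # illustrative
key = bucket.get_key('backups/20160101/db.tbl')  # HEAD request; illustrative path
if key is None:
    print('partition not yet backed up')
else:
    print('already backed up, %d bytes' % key.size)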

Example 5: already_uploaded

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def already_uploaded(instance, binlog, logged_uploads):
    """ Check to see if a binlog has already been uploaded

    Args:
        instance - a hostAddr object
        binlog - the full path to the binlog file
        logged_uploads - a set of all uploaded binlogs for this instance

    Returns True if already uploaded, False otherwise.
    """
    if os.path.basename(binlog) in logged_uploads:
        log.debug('Binlog already logged as uploaded')
        return True

    # we should hit this code rarely, only when uploads have not been logged
    boto_conn = boto.connect_s3()
    bucket = boto_conn.get_bucket(environment_specific.BACKUP_BUCKET_UPLOAD_MAP[host_utils.get_iam_role()],
                                  validate=False)
    if bucket.get_key(s3_binlog_path(instance, os.path.basename(binlog))):
        log.debug("Binlog already uploaded but not logged {b}".format(b=binlog))
        log_binlog_upload(instance, binlog)
        return True

    return False 
Author: pinterest, Project: mysql_utils, Lines: 26, Source: archive_mysql_binlogs.py

Example 6: __get_from_s3

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def __get_from_s3(self, key):
        try:
            conn = boto.connect_s3(aws_access_key_id=self.__storage_client_config["access_key"],
                                   aws_secret_access_key=self.__storage_client_config["secret_key"],
                                   host=self.__storage_client_config["endpoint"],
                                   port=80,
                                   is_secure=False,
                                   calling_format=boto.s3.connection.OrdinaryCallingFormat())
            bucket = conn.get_bucket(self.__storage_client_config["bucket"])
            store_key = bucket.get_key(key)
            result = store_key.get_contents_as_string()
            conn.close()
            return result
        except AWSConnectionError:
            raise AWSConnectionError("Unable to connect to AWS")
        except Exception as e:
            raise Exception("Exception occurred: " + str(e)) 
Author: flipkart-incubator, Project: Hunch, Lines: 19, Source: storage_client.py

Example 7: __init__

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def __init__(self, your_aws_public_key, your_aws_secret_key):
        self.public_key = your_aws_public_key
        self.secret_key = your_aws_secret_key

        s3 = boto.connect_s3(
            aws_access_key_id=self.public_key,
            aws_secret_access_key=self.secret_key,
        )

        for bucket in s3.get_all_buckets():
            if bucket.name == 'hcp-openaccess':
                self.s3_bucket = bucket
                break

        self.hcp_directory = os.path.join(DATA_PATH, 'hcp')
        if not os.path.exists(self.hcp_directory):
            os.makedirs(self.hcp_directory) 
Author: AthenaEPI, Project: dmipy, Lines: 19, Source: downloader_aws.py

Example 8: get_records

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def get_records(id_, iterator):
    conn = boto.connect_s3(anon=True, host='s3.amazonaws.com')
    bucket = conn.get_bucket('commoncrawl')

    for uri in iterator:
        key_ = Key(bucket, uri)
        _file = warc.WARCFile(fileobj=GzipStreamFile(key_))

        for record in _file:
            if record['Content-Type'] == 'application/json':
                record = json.loads(record.payload.read())
                try:
                    def cc_filter(x):
                        return "creativecommons.org" in x['url']

                    cc_links = list(filter(cc_filter, record['Envelope']['Payload-Metadata']['HTTP-Response-Metadata']['HTML-Metadata']['Links']))
                    if cc_links:
                        yield record
                except KeyError:
                    pass 
Author: creativecommons, Project: cccatalog, Lines: 22, Source: test_deeds.py
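
anon=True makes boto skip request signing altogether, which suffices for world-readable buckets such as commoncrawl. A standalone sketch (the object path is illustrative; real WARC paths come from the crawl's index listings):

import boto
from boto.s3.key import Key

conn = boto.connect_s3(anon=True, host='s3.amazonaws.com')
bucket = conn.get_bucket('commoncrawl', validate=False)
key = Key(bucket, 'crawl-data/CC-MAIN-2016-07/warc.paths.gz')  # illustrative path
data = key.get_contents_as_string()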

Example 9: test_upload_content_type

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def test_upload_content_type(self):
        """
        A content type can be set for an uploaded file.
        """
        filename = random_name(self)
        tmpdir = FilePath(self.mktemp())
        tmpdir.makedirs()
        tmpfile = tmpdir.child(filename)
        tmpfile.setContent('foo')
        s3 = boto.connect_s3()
        bucket = s3.get_bucket(bucket_name)
        self.addCleanup(bucket.delete_key, filename)
        sync_perform(
            dispatcher=ComposedDispatcher([boto_dispatcher, base_dispatcher]),
            effect=Effect(UploadToS3(
                source_path=tmpdir,
                target_bucket=bucket_name,
                target_key=filename,
                file=tmpfile,
                content_type='application/json',
            ))
        )
        key = bucket.get_key(filename)
        self.assertEqual('application/json', key.content_type) 
Author: ClusterHQ, Project: flocker, Lines: 26, Source: test_aws.py

Example 10: perform_copy_s3_keys

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def perform_copy_s3_keys(dispatcher, intent):
    """
    See :class:`CopyS3Keys`.
    """
    s3 = boto.connect_s3()
    source_bucket = s3.get_bucket(intent.source_bucket)
    for key in intent.keys:
        source_key = source_bucket.get_key(intent.source_prefix + key)

        # We are explicit about Content-Type here, since the upload tool
        # isn't smart enough to set the right Content-Type.
        destination_metadata = source_key.metadata
        for extension, content_type in EXTENSION_MIME_TYPES.items():
            if key.endswith(extension):
                destination_metadata['Content-Type'] = content_type
                break

        source_key.copy(
            dst_bucket=intent.destination_bucket,
            dst_key=intent.destination_prefix + key,
            metadata=destination_metadata,
        ) 
Author: ClusterHQ, Project: flocker, Lines: 24, Source: aws.py

Example 11: test_PyObjectWalker_boto_connection_2

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def test_PyObjectWalker_boto_connection_2(self):
        import boto
        bucketName = 'ufora-test-data'

        conn = boto.connect_s3()
        bucket = conn.get_bucket(bucketName)
        key = bucket.get_key("trip_data_1.csv")

        res = key.md5

        walker = PyObjectWalker(
            self.mappings,
            BinaryObjectRegistry()
            )

        walker.walkPyObject(boto)
        walker.walkPyObject(conn)
        walker.walkPyObject(bucket)
        walker.walkPyObject(key)
        walker.walkPyObject(res) 
Author: ufora, Project: ufora, Lines: 22, Source: PyObjectWalker_test.py

Example 12: connectS3

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def connectS3(self):
        if not boto.config.has_section('Boto'):
            boto.config.add_section('Boto')

        # override the default super-long timeout in boto.
        # boto automatically retries timed out requests so it's best to keep a
        # short timeout because S3 can sometimes (about 1 in 10 requests) stall
        # for a long time.
        boto.config.set('Boto', 'http_socket_timeout', '5')
        boto.config.set('Boto', 'metadata_service_num_attempts', '10')

        az = os.getenv('AWS_AVAILABILITY_ZONE')
        
        boto_args = {}
        if self.credentials_ != ('', ''):
            boto_args = {
                'aws_access_key_id': self.credentials_[0],
                'aws_secret_access_key': self.credentials_[1]
                }
        if az:
            # Strip the trailing zone letter to get the region,
            # e.g. 'us-east-1a' -> 'us-east-1'.
            return boto.s3.connect_to_region(az[:-1], **boto_args)
        else:
            return boto.connect_s3(**boto_args) 
Author: ufora, Project: ufora, Lines: 25, Source: ActualS3Interface.py

Example 13: _do_upload

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def _do_upload(self):
        self.gzip_temp.flush()
        # TODO: move to a generalized put() function
        if self.output_dir.startswith('s3://') or self.output_dir.startswith('s3a://'):
            import boto
            conn = boto.connect_s3()
            parts = urlparse.urlsplit(self.output_dir)

            bucket = conn.lookup(parts.netloc)

            cdxkey = bucket.new_key(parts.path + '/' + self.part_name)
            cdxkey.set_contents_from_file(self.gzip_temp, rewind=True)
        else:
            path = os.path.join(self.output_dir, self.part_name)

            self.gzip_temp.seek(0)

            with open(path, 'w+b') as target:
                shutil.copyfileobj(self.gzip_temp, target)

        self.gzip_temp.close() 
Author: ikreymer, Project: webarchive-indexing, Lines: 23, Source: zipnumclusterjob.py

Example 14: mapper_init

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def mapper_init(self):
        # Note: this assumes that credentials are set via
        # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env variables
        self.conn = boto.connect_s3()

        self.warc_bucket = self.conn.lookup(self.options.warc_bucket)
        assert(self.warc_bucket)

        self.cdx_bucket = self.conn.lookup(self.options.cdx_bucket)
        assert(self.cdx_bucket)

        self.index_options = {
            'surt_ordered': True,
            'sort': True,
            'cdxj': True,
            #'minimal': True
        } 
Author: ikreymer, Project: webarchive-indexing, Lines: 19, Source: indexwarcsjob.py

Example 15: get_s3_content_and_delete

# Required import: import boto [as alias]
# Or: from boto import connect_s3 [as alias]
def get_s3_content_and_delete(bucket, path, with_key=False):
    """ Get content from s3 key, and delete key afterwards.
    """
    if is_botocore():
        import botocore.session
        session = botocore.session.get_session()
        client = session.create_client('s3')
        key = client.get_object(Bucket=bucket, Key=path)
        content = key['Body'].read()
        client.delete_object(Bucket=bucket, Key=path)
    else:
        import boto
        # assuming boto=2.2.2
        bucket = boto.connect_s3().get_bucket(bucket, validate=False)
        key = bucket.get_key(path)
        content = key.get_contents_as_string()
        bucket.delete_key(path)
    return (content, key) if with_key else content 
Author: wistbean, Project: learn_python3_spider, Lines: 20, Source: test.py
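
On the botocore branch above, the write-side counterpart to get_object is put_object; a minimal sketch assuming default credential resolution and a hypothetical bucket:

import botocore.session

session = botocore.session.get_session()
client = session.create_client('s3')
# Body accepts bytes or a file-like object.
client.put_object(Bucket='example-bucket', Key='path/to/object', Body=b'payload')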


Note: The boto.connect_s3 examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors, and any redistribution or use should follow the corresponding project's License. Do not reproduce without permission.