This article collects typical usage examples of the blobstore.BlobReader method from the Python module google.appengine.ext.blobstore. If you are wondering what blobstore.BlobReader is for, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also explore further usage examples for the containing module, google.appengine.ext.blobstore.
The sections below present 15 code examples of blobstore.BlobReader, sorted by popularity by default.
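Before the examples, here is a minimal orientation sketch of the BlobReader constructor and its file-like read interface. The blob_key variable is assumed to come from elsewhere (for example an upload handler or blobstore.create_gs_key); Example 14 below walks through the same calls inside a full request handler.
from google.appengine.ext import blobstore

# blob_key is assumed to exist already (e.g. from an earlier upload).
blob_reader = blobstore.BlobReader(blob_key)                       # defaults
blob_reader = blobstore.BlobReader(blob_key, buffer_size=1048576)  # 1 MB buffer
blob_reader = blobstore.BlobReader(blob_key, position=0)           # initial offset

# BlobReader behaves like a read-only file object.
data = blob_reader.read()            # entire value; avoid for very large blobs
blob_reader.seek(0)
first_line = blob_reader.readline()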
Example 1: __init__
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def __init__(self, blob_key, start_position, end_position):
    """Initializes this instance with the given blob key and character range.

    This BlobstoreInputReader will read from the first record starting
    strictly after start_position until the first record ending at or after
    end_position (exclusive). As an exception, if start_position is 0, then
    this InputReader starts reading at the first record.

    Args:
      blob_key: the BlobKey that this input reader is processing.
      start_position: the position to start reading at.
      end_position: a position in the last record to read.
    """
    self._blob_key = blob_key
    self._blob_reader = blobstore.BlobReader(blob_key,
                                             self._BLOB_BUFFER_SIZE,
                                             start_position)
    self._end_position = end_position
    self._has_iterated = False
    self._read_before_start = bool(start_position)
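The range rule described in the docstring (skip the record that start_position lands inside of, unless start_position is 0) can be sketched on its own. This is illustrative only and mirrors what the _read_before_start flag is used for; blob_key and start_position are assumed to be defined by the caller.
reader = blobstore.BlobReader(blob_key, 1024 * 1024, start_position)
if start_position:
    reader.readline()              # discard the (possibly partial) record we landed in
first_record = reader.readline()   # first record starting strictly after start_position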
Example 2: get
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def get(self, zip_key, file_no, offset, md5, _id):
    def cleanup(text):
        return text.replace('\\n', ' ').replace('\\"', '"').replace('http://inventures.euhttp://inventures.eu/', 'http://inventures.eu/')

    blob_reader = blobstore.BlobReader(zip_key)
    zip_reader = zipfile.ZipFile(blob_reader)
    infolist = zip_reader.infolist()
    zipinfo = infolist[int(file_no)]
    with zip_reader.open(zipinfo) as f:
        f.read(int(offset))
        text = f.readline()
    found_pattern = text_file_pattern.search(text)
    if found_pattern:
        html = found_pattern.group(2)  # .replace('/sites/', 'http://inventures.eu/sites/')
        self.response.out.write(cleanup(html))
        return
    message = 'ID %s not found' % _id
    self.response.out.write('<html><body><p>%s</p></body></html>' % message)
    return
Example 3: run
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def run(self, resource):
    pairs = 0
    with blobstore.BlobReader(resource) as blob_reader:
        for line in blob_reader.readlines():
            # {'33700': ['/view/Mla4PRwYe1ZpNJN4hluceA==/0/1060348/59518bb889e6/mpmoIZY6S4Si89wdEyX9IA', etc ]}
            kdocs = json.loads(line.replace("'", '"'))
            k = kdocs.keys()[0]
            docs = kdocs[k]
            pairs += len(docs) * (len(docs) - 1) / 2
    logging.info('Total number of pairs to compute: %d', pairs)
    output = yield mapreduce_pipeline.MapreducePipeline(
        "results-eval",
        "blobs.eval_map",
        "blobs.eval_reduce",
        'mapreduce.input_readers.BlobstoreLineInputReader',
        "mapreduce.output_writers.BlobstoreOutputWriter",
        mapper_params={
            "blob_keys": resource,
        },
        reducer_params={
            "mime_type": "text/plain",
        },
        shards=16)
    yield StoreEvalResults(resource, output)
Example 4: migrate_to_gcs
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def migrate_to_gcs(self):
    if self.original_size_key == self.filename:
        raise Exception('This image (%s) looks like it already is in GCS' % self.filename)
    content_type = self.get_content_type(self.filename)
    image_bytes = blobstore.BlobReader(self.original_size_key, buffer_size=1048576).read()
    small_image_bytes = blobstore.BlobReader(self.serving_size_key, buffer_size=1048576).read()
    self.original_size_key = self.filename
    self.serving_size_key = self.get_small_image_name(self.filename)
    filestore.write(self.original_size_key, image_bytes, content_type)
    filestore.write(self.serving_size_key, small_image_bytes, content_type)
    self.put()
Example 5: split_input
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def split_input(cls, mapper_spec, _reader=blobstore.BlobReader):
    """Returns a list of input shard states for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader. Must contain
        'blob_key' parameter with one blob key.
      _reader: a callable that returns a file-like object for reading blobs.
        Used for dependency injection.

    Returns:
      A list of InputReaders spanning files within the zip.
    """
    params = _get_params(mapper_spec)
    blob_key = params[cls.BLOB_KEY_PARAM]
    zip_input = zipfile.ZipFile(_reader(blob_key))
    files = zip_input.infolist()
    total_size = sum(x.file_size for x in files)
    num_shards = min(mapper_spec.shard_count, cls._MAX_SHARD_COUNT)
    size_per_shard = total_size // num_shards

    # Break the list of files into sublists, each of approximately
    # size_per_shard bytes.
    shard_start_indexes = [0]
    current_shard_size = 0
    for i, fileinfo in enumerate(files):
        current_shard_size += fileinfo.file_size
        if current_shard_size >= size_per_shard:
            shard_start_indexes.append(i + 1)
            current_shard_size = 0
    if shard_start_indexes[-1] != len(files):
        shard_start_indexes.append(len(files))

    return [cls(blob_key, start_index, end_index, _reader)
            for start_index, end_index
            in zip(shard_start_indexes, shard_start_indexes[1:])]
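The loop above groups the zip's members greedily by cumulative size. The same grouping step, written as a standalone sketch with illustrative names and no App Engine dependency, looks like this:
def compute_shard_ranges(file_sizes, num_shards):
    """Greedily split a list of file sizes into contiguous, roughly even index ranges."""
    size_per_shard = sum(file_sizes) // num_shards
    starts = [0]
    current = 0
    for i, size in enumerate(file_sizes):
        current += size
        if current >= size_per_shard:
            starts.append(i + 1)
            current = 0
    if starts[-1] != len(file_sizes):
        starts.append(len(file_sizes))
    return zip(starts, starts[1:])

# compute_shard_ranges([10, 90, 50, 50], 2) -> [(0, 2), (2, 4)]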
Example 6: post
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def post(self):
    blob_key = self.request.get('blob_key')
    filename = self.request.get("filename")
    blob_reader = blobstore.BlobReader(blob_key)
    zip_reader = zipfile.ZipFile(blob_reader)
    lsh_zipfile(PeerbeltLine, zip_reader, 'gae_test', filename, file_key=blob_key)
    return
Example 7: get_all_blob_info
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def get_all_blob_info():
    for blob_info in blobstore.BlobInfo.all():
        blob_key = blob_info.key()
        blob_reader = blobstore.BlobReader(blob_key)
        yield blob_info, blob_reader
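A typical way to consume this generator might look like the following (hypothetical caller; filename and size are standard BlobInfo properties):
import logging

for blob_info, blob_reader in get_all_blob_info():
    logging.info('%s: %d bytes, first line: %r',
                 blob_info.filename, blob_info.size, blob_reader.readline())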
Example 8: get_reader
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def get_reader(key):
    if not isinstance(key, str):
        updated_key = str(urllib.unquote(key))
    else:
        updated_key = key
    return blobstore.BlobReader(updated_key)
Example 9: from_json
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def from_json(cls, json, _reader=blobstore.BlobReader):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      json: The InputReader state as a dict-like object.
      _reader: For dependency injection.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    return cls(json[cls.BLOB_KEY_PARAM],
               json[cls.START_FILE_INDEX_PARAM],
               json[cls.END_FILE_INDEX_PARAM],
               json[cls.OFFSET_PARAM],
               _reader)
Example 10: __init__
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def __init__(self, blob_key, start_position):
    self._blob_key = _MaybeStripBlobstorePrefix(blob_key)
    self._records_reader = records.RecordsReader(
        blobstore.BlobReader(self._blob_key))
    self._records_reader.seek(start_position)
Example 11: split_input
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def split_input(cls, mapper_spec, _reader=blobstore.BlobReader):
    """Returns a list of input shard states for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader. Must contain
        'blob_key' parameter with one blob key.
      _reader: a callable that returns a file-like object for reading blobs.
        Used for dependency injection.

    Returns:
      A list of InputReaders spanning files within the zip.
    """
    params = _get_params(mapper_spec)
    blob_key = params[cls.BLOB_KEY_PARAM]
    zip_input = zipfile.ZipFile(_reader(blob_key))
    zfiles = zip_input.infolist()
    total_size = sum(x.file_size for x in zfiles)
    num_shards = min(mapper_spec.shard_count, cls._MAX_SHARD_COUNT)
    size_per_shard = total_size // num_shards

    # Break the list of files into sublists, each of approximately
    # size_per_shard bytes.
    shard_start_indexes = [0]
    current_shard_size = 0
    for i, fileinfo in enumerate(zfiles):
        current_shard_size += fileinfo.file_size
        if current_shard_size >= size_per_shard:
            shard_start_indexes.append(i + 1)
            current_shard_size = 0
    if shard_start_indexes[-1] != len(zfiles):
        shard_start_indexes.append(len(zfiles))

    return [cls(blob_key, start_index, end_index, _reader)
            for start_index, end_index
            in zip(shard_start_indexes, shard_start_indexes[1:])]
Example 12: _run_test_success
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def _run_test_success(self, upload_data, upload_url):
    """Basic dispatcher request flow."""
    request_path = urlparse.urlparse(upload_url)[2]
    # Get session key from upload url.
    session_key = upload_url.split('/')[-1]

    self.environ['PATH_INFO'] = request_path
    self.environ['CONTENT_TYPE'] = (
        'multipart/form-data; boundary="================1234=="')

    status, _, response_body, forward_environ, forward_body = (
        self.run_dispatcher(upload_data))

    self.assertEquals('200 OK', status)
    self.assertEquals('Forwarded successfully.', response_body)
    self.assertNotEquals(None, forward_environ)
    # These must NOT be unicode strings.
    self.assertIsInstance(forward_environ['PATH_INFO'], str)
    if 'QUERY_STRING' in forward_environ:
        self.assertIsInstance(forward_environ['QUERY_STRING'], str)
    self.assertRegexpMatches(forward_environ['CONTENT_TYPE'],
                             r'multipart/form-data; boundary="[^"]+"')
    self.assertEquals(len(forward_body), int(forward_environ['CONTENT_LENGTH']))
    self.assertIn(constants.FAKE_IS_ADMIN_HEADER, forward_environ)
    self.assertEquals('1', forward_environ[constants.FAKE_IS_ADMIN_HEADER])

    new_request = email.message_from_string(
        'Content-Type: %s\n\n%s' % (forward_environ['CONTENT_TYPE'],
                                    forward_body))
    (upload,) = new_request.get_payload()
    self.assertEquals('message/external-body', upload.get_content_type())

    message = email.message.Message()
    message.add_header('Content-Type', upload['Content-Type'])
    blob_key = message.get_param('blob-key')
    blob_contents = blobstore.BlobReader(blob_key).read()
    self.assertEquals('value', blob_contents)

    self.assertRaises(datastore_errors.EntityNotFoundError,
                      datastore.Get,
                      session_key)

    return upload, forward_environ, forward_body
Example 13: split_input
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def split_input(cls, mapper_spec, _reader=blobstore.BlobReader):
    """Returns a list of input readers for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader. Must contain
        'blob_keys' parameter with one or more blob keys.
      _reader: a callable that returns a file-like object for reading blobs.
        Used for dependency injection.

    Returns:
      A list of InputReaders spanning the subfiles within the blobs.
      There will be at least one reader per blob, but it will otherwise
      attempt to keep the expanded size even.
    """
    params = _get_params(mapper_spec)
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
        # This is a mechanism to allow multiple blob keys (which do not contain
        # commas) in a single string. It may go away.
        blob_keys = blob_keys.split(",")

    blob_files = {}
    total_size = 0
    for blob_key in blob_keys:
        zip_input = zipfile.ZipFile(_reader(blob_key))
        blob_files[blob_key] = zip_input.infolist()
        total_size += sum(x.file_size for x in blob_files[blob_key])

    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)

    # We can break on both blob key and file-within-zip boundaries.
    # A shard will span at minimum a single blob key, but may only
    # handle a few files within a blob.
    size_per_shard = total_size // shard_count

    readers = []
    for blob_key in blob_keys:
        bfiles = blob_files[blob_key]
        current_shard_size = 0
        start_file_index = 0
        next_file_index = 0
        for fileinfo in bfiles:
            next_file_index += 1
            current_shard_size += fileinfo.file_size
            if current_shard_size >= size_per_shard:
                readers.append(cls(blob_key, start_file_index, next_file_index, 0,
                                   _reader))
                current_shard_size = 0
                start_file_index = next_file_index
        if current_shard_size != 0:
            readers.append(cls(blob_key, start_file_index, next_file_index, 0,
                               _reader))
    return readers
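Because of the basestring branch near the top, the 'blob_keys' mapper parameter may be given either as a list of keys or as a single comma-separated string. A hypothetical configuration (placeholder keys) could therefore be written either way:
# Both forms are equivalent for this reader; key_a and key_b are placeholders.
mapper_params = {"blob_keys": [str(key_a), str(key_b)]}
mapper_params = {"blob_keys": "%s,%s" % (key_a, key_b)}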
Example 14: get
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def get(self):
    # Get the default Cloud Storage Bucket name and create a file name for
    # the object in Cloud Storage.
    bucket = app_identity.get_default_gcs_bucket_name()

    # Cloud Storage file names are in the format /bucket/object.
    filename = '/{}/blobreader_demo'.format(bucket)

    # Create a file in Google Cloud Storage and write something to it.
    with cloudstorage.open(filename, 'w') as filehandle:
        filehandle.write('abcde\n')

    # In order to read the contents of the file using the Blobstore API,
    # you must create a blob_key from the Cloud Storage file name.
    # Blobstore expects the filename to be in the format of:
    # /gs/bucket/object
    blobstore_filename = '/gs{}'.format(filename)
    blob_key = blobstore.create_gs_key(blobstore_filename)

    # [START gae_blobstore_reader]
    # Instantiate a BlobReader for a given Blobstore blob_key.
    blob_reader = blobstore.BlobReader(blob_key)

    # Instantiate a BlobReader for a given Blobstore blob_key, setting the
    # buffer size to 1 MB.
    blob_reader = blobstore.BlobReader(blob_key, buffer_size=1048576)

    # Instantiate a BlobReader for a given Blobstore blob_key, setting the
    # initial read position.
    blob_reader = blobstore.BlobReader(blob_key, position=0)

    # Read the entire value into memory. This may take a while depending
    # on the size of the value and the size of the read buffer, and is not
    # recommended for large values.
    blob_reader_data = blob_reader.read()

    # Write the contents to the response.
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.write(blob_reader_data)

    # Set the read position back to 0, then read and write 3 bytes.
    blob_reader.seek(0)
    blob_reader_data = blob_reader.read(3)
    self.response.write(blob_reader_data)
    self.response.write('\n')

    # Set the read position back to 0, then read and write one line (up to
    # and including a '\n' character) at a time.
    blob_reader.seek(0)
    for line in blob_reader:
        self.response.write(line)
    # [END gae_blobstore_reader]

    # Delete the file from Google Cloud Storage using the blob_key.
    blobstore.delete(blob_key)
Example 15: __init__
# Required module: from google.appengine.ext import blobstore [as alias]
# Or: from google.appengine.ext.blobstore import BlobReader [as alias]
def __init__(self, open_request, file_storage):
    """Constructor.

    Args:
      open_request: An instance of open file request.
      file_storage: An instance of BlobstoreStorage.
    """
    self.filename = open_request.filename()
    self.file_storage = file_storage
    self.blob_reader = None
    self.content_type = None
    self.mime_content_type = None
    open_mode = open_request.open_mode()
    content_type = open_request.content_type()

    if not self.filename.startswith(_BLOBSTORE_DIRECTORY):
        if not self.file_storage.has_blobstore_file(self.filename):
            raise_error(file_service_pb.FileServiceErrors.INVALID_FILE_NAME)
    self.ticket = self.filename[len(_BLOBSTORE_DIRECTORY):]

    if open_mode == file_service_pb.OpenRequest.APPEND:
        if not self.file_storage.has_blobstore_file(self.filename):
            raise_error(file_service_pb.FileServiceErrors.EXISTENCE_ERROR)
        if self.file_storage.is_finalized(self.filename):
            raise_error(file_service_pb.FileServiceErrors.FINALIZATION_ERROR,
                        'File is already finalized')
        self.mime_content_type = self.file_storage.get_content_type(self.filename)
        self.blob_file_name = self.file_storage.get_blob_file_name(self.filename)
    else:
        if self.ticket.startswith(files._CREATION_HANDLE_PREFIX):
            blobkey = self.file_storage.get_blob_key(self.ticket)
            if not blobkey:
                raise_error(file_service_pb.FileServiceErrors.FINALIZATION_ERROR,
                            'Blobkey not found.')
        else:
            blobkey = self.ticket
        blob_info = blobstore.BlobInfo.get(blobkey)
        if not blob_info:
            raise_error(file_service_pb.FileServiceErrors.FINALIZATION_ERROR,
                        'Blobinfo not found.')
        self.blob_reader = blobstore.BlobReader(blob_info)
        self.mime_content_type = blob_info.content_type

    if content_type != file_service_pb.FileContentType.RAW:
        raise_error(file_service_pb.FileServiceErrors.WRONG_CONTENT_TYPE)