This article collects typical usage examples of the Python method oio.blob.client.BlobClient.chunk_copy. If you are wondering what BlobClient.chunk_copy does or how to use it, the selected code examples below may help. You can also explore the containing class, oio.blob.client.BlobClient, for further usage examples.
The following presents 3 code examples of the BlobClient.chunk_copy method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
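Before diving into the full examples, here is a minimal usage sketch of the pattern they all share: chunk_copy(from_url, to_url) duplicates the data of an existing chunk onto a spare chunk location, after which the original chunk can be deleted. This sketch is illustrative only; the BlobClient constructor arguments and the chunk URLs are assumptions, not taken from the examples below.

# Minimal sketch (illustrative): copy a chunk to a spare location, then drop the original.
# The rawx addresses and chunk IDs below are hypothetical.
from oio.blob.client import BlobClient

blob_client = BlobClient()  # default construction; required arguments may differ between oio-sds versions

source_url = 'http://127.0.0.1:6010/0123456789ABCDEF'  # chunk currently hosted on a rawx service
spare_url = 'http://127.0.0.1:6011/FEDCBA9876543210'   # spare location, normally obtained from the meta2 service

blob_client.chunk_copy(source_url, spare_url)   # duplicate the chunk data onto the spare location
blob_client.chunk_delete(source_url)            # remove the original once the copy has succeeded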
Example 1: Content
# Required import: from oio.blob.client import BlobClient [as alias]
# Or: from oio.blob.client.BlobClient import chunk_copy [as alias]
# ... part of the code omitted here ...
    def _meta2_get_spare_chunk(self, chunks_notin, chunks_broken):
        spare_data = {
            "notin": ChunksHelper(chunks_notin, False).raw(),
            "broken": ChunksHelper(chunks_broken, False).raw()
        }
        try:
            spare_resp = self.container_client.content_spare(
                cid=self.container_id, content=self.content_id,
                data=spare_data, stgpol=self.stgpol_name)
        except ClientException as e:
            raise exc.SpareChunkException("No spare chunk (%s)" % e.message)
        url_list = []
        for c in spare_resp["chunks"]:
            url_list.append(c["id"])
        return url_list

    def _meta2_update_spare_chunk(self, current_chunk, new_url):
        old = [{'type': 'chunk',
                'id': current_chunk.url,
                'hash': current_chunk.hash,
                'size': current_chunk.size,
                'pos': current_chunk.pos,
                'content': self.content_id}]
        new = [{'type': 'chunk',
                'id': new_url,
                'hash': current_chunk.hash,
                'size': current_chunk.size,
                'pos': current_chunk.pos,
                'content': self.content_id}]
        update_data = {'old': old, 'new': new}
        self.container_client.container_raw_update(
            cid=self.container_id, data=update_data)

    def _meta2_create_object(self):
        self.container_client.content_create(cid=self.container_id,
                                             path=self.path,
                                             content_id=self.content_id,
                                             stgpol=self.stgpol_name,
                                             size=self.length,
                                             checksum=self.hash,
                                             version=self.version,
                                             chunk_method=self.chunk_method,
                                             mime_type=self.mime_type,
                                             data=self.chunks.raw())

    def rebuild_chunk(self, chunk_id):
        raise NotImplementedError()

    def upload(self, stream):
        try:
            self._upload(stream)
        except:
            # Keep the stack trace
            exc_info = sys.exc_info()
            for chunk in self.chunks:
                try:
                    self.blob_client.chunk_delete(chunk.url)
                except:
                    self.logger.warn("Failed to delete %s", chunk.url)
            # Raise with the original stack trace
            raise exc_info[0], exc_info[1], exc_info[2]

    def _upload(self, stream):
        raise NotImplementedError()

    def download(self):
        raise NotImplementedError()

    def delete(self):
        self.container_client.content_delete(cid=self.container_id,
                                             path=self.path)
    def move_chunk(self, chunk_id):
        current_chunk = self.chunks.filter(id=chunk_id).one()
        if current_chunk is None:
            raise OrphanChunk("Chunk not found in content")
        other_chunks = self.chunks.filter(
            metapos=current_chunk.metapos).exclude(id=chunk_id).all()
        # Ask meta2 for a spare location that avoids the other chunks at this position
        spare_urls = self._meta2_get_spare_chunk(other_chunks,
                                                 [current_chunk])
        self.logger.debug("copy chunk from %s to %s",
                          current_chunk.url, spare_urls[0])
        # Copy the chunk data to the spare location, then update the meta2 reference
        self.blob_client.chunk_copy(current_chunk.url, spare_urls[0])
        self._meta2_update_spare_chunk(current_chunk, spare_urls[0])
        # Only delete the original once the copy and the meta2 update have succeeded
        try:
            self.blob_client.chunk_delete(current_chunk.url)
        except:
            self.logger.warn("Failed to delete chunk %s" % current_chunk.url)
        current_chunk.url = spare_urls[0]
        return current_chunk.raw()
Example 2: BlobMoverWorker
# Required import: from oio.blob.client import BlobClient [as alias]
# Or: from oio.blob.client.BlobClient import chunk_copy [as alias]
# ... part of the code omitted here ...
                    '%(errors)d '
                    '%(c_rate).2f '
                    '%(b_rate).2f '
                    '%(total).2f '
                    '%(mover_time).2f '
                    '%(mover_rate).2f' % {
                        'start_time': time.ctime(report_time),
                        'passes': self.passes,
                        'errors': self.errors,
                        'c_rate': self.passes / (now - report_time),
                        'b_rate': self.bytes_processed / (now - report_time),
                        'total': (now - start_time),
                        'mover_time': mover_time,
                        'mover_rate': mover_time / (now - start_time)
                    }
                )
                report_time = now
                total_errors += self.errors
                self.passes = 0
                self.bytes_processed = 0
                self.last_reported = now
            mover_time += (now - loop_time)
        elapsed = (time.time() - start_time) or 0.000001
        self.logger.info(
            '%(elapsed).02f '
            '%(errors)d '
            '%(chunk_rate).2f '
            '%(bytes_rate).2f '
            '%(mover_time).2f '
            '%(mover_rate).2f' % {
                'elapsed': elapsed,
                'errors': total_errors + self.errors,
                'chunk_rate': self.total_chunks_processed / elapsed,
                'bytes_rate': self.total_bytes_processed / elapsed,
                'mover_time': mover_time,
                'mover_rate': mover_time / elapsed
            }
        )
    def safe_chunk_move(self, path):
        try:
            self.chunk_move(path)
        except Exception as e:
            self.errors += 1
            self.logger.error('ERROR while moving chunk %s: %s', path, e)
        self.passes += 1

    def load_chunk_metadata(self, path):
        with open(path) as f:
            return read_chunk_metadata(f)
    def chunk_move(self, path):
        meta = self.load_chunk_metadata(path)
        content_cid = meta['content_cid']
        content_path = meta['content_path']
        chunk_url = 'http://%s/%s' % \
            (self.address, meta['chunk_id'])
        try:
            data = self.container_client.content_show(
                cid=content_cid, path=content_path)
        except exc.NotFound:
            raise exc.OrphanChunk('Content not found')
        # Collect the chunks at the same position and single out the one to move
        current_chunk = None
        notin = []
        for c in data:
            if c['pos'] == meta['chunk_pos']:
                notin.append(c)
        for c in notin:
            if c['url'] == chunk_url:
                current_chunk = c
                notin.remove(c)
        if not current_chunk:
            raise exc.OrphanChunk('Chunk not found in content')
        # Ask meta2 for a spare location, then copy the chunk data to it
        spare_data = {'notin': notin, 'broken': [current_chunk], 'size': 0}
        spare_resp = self.container_client.content_spare(
            cid=content_cid, path=content_path, data=spare_data)
        new_chunk = spare_resp['chunks'][0]
        self.blob_client.chunk_copy(
            current_chunk['url'], new_chunk['id'])
        # Replace the old chunk reference with the new one in meta2
        old = [{'type': 'chunk',
                'id': current_chunk['url'],
                'hash': meta['chunk_hash'],
                'size': int(meta['chunk_size'])}]
        new = [{'type': 'chunk',
                'id': new_chunk['id'],
                'hash': meta['chunk_hash'],
                'size': int(meta['chunk_size'])}]
        update_data = {'old': old, 'new': new}
        self.container_client.container_raw_update(
            cid=content_cid, data=update_data)
        # Delete the original chunk only after the copy and the meta2 update
        self.blob_client.chunk_delete(current_chunk['url'])
        self.logger.info(
            'moved chunk %s to %s', current_chunk['url'], new_chunk['id'])
Example 3: BlobRebuilderWorker
# Required import: from oio.blob.client import BlobClient [as alias]
# Or: from oio.blob.client.BlobClient import chunk_copy [as alias]
# ... part of the code omitted here ...
                         % (container_id, content_id, chunk_id))
        try:
            self.chunk_rebuild(container_id, content_id, chunk_id)
        except Exception as e:
            self.errors += 1
            self.logger.error('ERROR while rebuilding chunk %s|%s|%s) : %s',
                              container_id, content_id, chunk_id, e)
        self.passes += 1

    def _meta2_get_chunks_at_pos(self, container_id, content_id, chunk_id):
        current_chunk_url = 'http://%s/%s' % (self.volume, chunk_id)
        try:
            data = self.container_client.content_show(
                cid=container_id, content=content_id)
        except exc.NotFound:
            raise exc.OrphanChunk('Content not found')
        current_chunk = None
        for c in data:
            if c['url'] == current_chunk_url:
                current_chunk = c
                break
        if not current_chunk:
            raise exc.OrphanChunk('Chunk not found in content')
        duplicate_chunks = []
        for c in data:
            if c['pos'] == current_chunk['pos'] \
                    and c['url'] != current_chunk['url']:
                duplicate_chunks.append(c)
        if len(duplicate_chunks) == 0:
            raise exc.UnrecoverableContent('No copy of missing chunk')
        return current_chunk, duplicate_chunks

    def _meta2_get_spare_chunk(self, container_id, content_id, notin, broken):
        spare_data = {'notin': notin,
                      'broken': [broken],
                      'size': 0}
        try:
            spare_resp = self.container_client.content_spare(
                cid=container_id, content=content_id, data=spare_data)
        except ClientException as e:
            raise exc.SpareChunkException('No spare chunk (%s)' % e.message)
        return spare_resp['chunks'][0]

    def _meta2_replace_chunk(self, container_id, content_id,
                             current_chunk, new_chunk):
        old = [{'type': 'chunk',
                'id': current_chunk['url'],
                'hash': current_chunk['hash'],
                'size': current_chunk['size'],
                'pos': current_chunk['pos'],
                'content': content_id}]
        new = [{'type': 'chunk',
                'id': new_chunk['id'],
                'hash': current_chunk['hash'],
                'size': current_chunk['size'],
                'pos': current_chunk['pos'],
                'content': content_id}]
        update_data = {'old': old, 'new': new}
        self.container_client.container_raw_update(
            cid=container_id, data=update_data)
    # TODO rain support
    def chunk_rebuild(self, container_id, content_id, chunk_id):
        current_chunk, duplicate_chunks = self._meta2_get_chunks_at_pos(
            container_id, content_id, chunk_id)
        spare_chunk = self._meta2_get_spare_chunk(
            container_id, content_id, duplicate_chunks, current_chunk)
        # Try each duplicate in turn until one copy to the spare location succeeds
        uploaded = False
        for src in duplicate_chunks:
            try:
                self.blob_client.chunk_copy(src['url'], spare_chunk['id'])
                self.logger.debug('copy chunk from %s to %s',
                                  src['url'], spare_chunk['id'])
                uploaded = True
                break
            except Exception as e:
                self.logger.debug('Failed to copy chunk from %s to %s: %s',
                                  src['url'], spare_chunk['id'], type(e))
        if not uploaded:
            raise exc.UnrecoverableContent('No copy available '
                                           'of missing chunk')
        # Point meta2 at the rebuilt chunk and notify the rdir service
        self._meta2_replace_chunk(container_id, content_id,
                                  current_chunk, spare_chunk)
        self.rdir_client.chunk_push(self.volume, container_id, content_id,
                                    chunk_id, rtime=int(time.time()))
        self.bytes_processed += current_chunk['size']
        self.total_bytes_processed += current_chunk['size']