This article collects typical usage examples of the chunk_get method of the Python class oio.blob.client.BlobClient. If you are wondering what BlobClient.chunk_get does, how to call it, or what real-world uses look like, the curated examples below should help; they also illustrate how the surrounding oio.blob.client.BlobClient class is used.
The sections below present 5 code examples of BlobClient.chunk_get, sorted by popularity by default.
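Before diving into the examples, here is a minimal, self-contained sketch of the call pattern they all share: chunk_get(url) returns a pair made of a metadata dictionary and a stream yielding the chunk body. The chunk URL below is a made-up placeholder and error handling is omitted; the tests in the examples obtain real URLs from the content's chunk list.

import hashlib

from oio.blob.client import BlobClient

blob_client = BlobClient()
# Hypothetical rawx chunk URL; the examples below get real URLs from container metadata.
chunk_url = "http://127.0.0.1:6008/0123456789ABCDEF"

# chunk_get() returns (metadata dict, data stream)
meta, stream = blob_client.chunk_get(chunk_url)

# The stream can be consumed incrementally, e.g. to checksum the chunk body.
checksum = hashlib.md5()
for piece in stream:
    checksum.update(piece)
print(meta.get('chunk_id'), checksum.hexdigest())

The examples that follow exercise exactly this pair: the metadata dict is compared against what the container service reports, and the stream is hashed to verify the chunk body.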
Example 1: TestContentFactory
# Required import: from oio.blob.client import BlobClient [as alias]
# Or: from oio.blob.client.BlobClient import chunk_get [as alias]
# ... part of the code omitted ...
    def test_change_content_0_byte_policy_twocopies_to_threecopies(self):
        self._test_change_policy(0, "TWOCOPIES", "THREECOPIES")

    def test_change_content_chunksize_bytes_policy_single_to_twocopies(self):
        self._test_change_policy(self.chunk_size, "SINGLE", "TWOCOPIES")

    def test_change_content_2xchunksize_bytes_policy_3copies_to_single(self):
        self._test_change_policy(self.chunk_size * 2, "THREECOPIES", "SINGLE")

    def test_change_content_with_same_policy(self):
        data = random_data(10)
        old_content = self._new_content("TWOCOPIES", data)
        changed_content = self.content_factory.change_policy(
            old_content.container_id, old_content.content_id, "TWOCOPIES")
        self.assertEqual(old_content.content_id, changed_content.content_id)

    def test_change_policy_unknown_content(self):
        self.assertRaises(ContentNotFound, self.content_factory.change_policy,
                          self.container_id, "1234", "SINGLE")

    def test_change_policy_unknown_storage_policy(self):
        data = random_data(10)
        old_content = self._new_content("TWOCOPIES", data)
        self.assertRaises(ClientException, self.content_factory.change_policy,
                          self.container_id, old_content.content_id, "UnKnOwN")

    def _test_move_chunk(self, policy):
        data = random_data(self.chunk_size)
        content = self._new_content(policy, data)
        chunk_id = content.chunks.filter(metapos=0)[0].id
        chunk_url = content.chunks.filter(metapos=0)[0].url
        # fetch the chunk once before moving it, so its data and metadata
        # can be compared afterwards
        chunk_meta, chunk_stream = self.blob_client.chunk_get(chunk_url)
        chunk_hash = md5_stream(chunk_stream)

        new_chunk = content.move_chunk(chunk_id)

        content_updated = self.content_factory.get(self.container_id,
                                                   content.content_id)
        hosts = []
        for c in content_updated.chunks.filter(metapos=0):
            self.assertThat(hosts, Not(Contains(c.host)))
            self.assertNotEquals(c.id, chunk_id)
            hosts.append(c.host)

        # the moved chunk must expose the same data and metadata,
        # apart from its chunk_id
        new_chunk_meta, new_chunk_stream = self.blob_client.chunk_get(
            new_chunk["url"])
        new_chunk_hash = md5_stream(new_chunk_stream)
        self.assertEqual(new_chunk_hash, chunk_hash)

        del chunk_meta["chunk_id"]
        del new_chunk_meta["chunk_id"]
        self.assertEqual(new_chunk_meta, chunk_meta)

    def test_single_move_chunk(self):
        self._test_move_chunk("SINGLE")

    def test_twocopies_move_chunk(self):
        self._test_move_chunk("TWOCOPIES")

    def test_rain_move_chunk(self):
        if len(self.conf['rawx']) < 9:
            self.skipTest("Need more than 8 rawx")
        self._test_move_chunk("RAIN")
Example 2: TestDupContent
# Required import: from oio.blob.client import BlobClient [as alias]
# Or: from oio.blob.client.BlobClient import chunk_get [as alias]
class TestDupContent(BaseTestCase):
    def setUp(self):
        super(TestDupContent, self).setUp()

        if len(self.conf['rawx']) < 3:
            self.skipTest("Not enough rawx. "
                          "Dup tests needs more than 2 rawx to run")

        self.namespace = self.conf['namespace']
        self.account = self.conf['account']
        self.chunk_size = self.conf['chunk_size']
        self.gridconf = {"namespace": self.namespace}
        self.content_factory = ContentFactory(self.gridconf)
        self.container_client = ContainerClient(self.gridconf)
        self.blob_client = BlobClient()
        self.container_name = "TestDupContent%f" % time.time()
        self.container_client.container_create(acct=self.account,
                                               ref=self.container_name)
        self.container_id = cid_from_name(self.account,
                                          self.container_name).upper()

    def tearDown(self):
        super(TestDupContent, self).tearDown()
    def _test_upload(self, stgpol, data_size):
        data = random_data(data_size)
        content = self.content_factory.new(self.container_id, "titi",
                                           len(data), stgpol)
        self.assertEqual(type(content), DupContent)

        content.upload(StringIO.StringIO(data))

        meta, chunks = self.container_client.content_show(
            cid=self.container_id, content=content.content_id)
        chunks = ChunksHelper(chunks)
        self.assertEqual(meta['hash'], md5_data(data))
        self.assertEqual(meta['length'], str(len(data)))
        self.assertEqual(meta['policy'], stgpol)
        self.assertEqual(meta['name'], "titi")

        metachunk_nb = int(math.ceil(float(len(data)) / self.chunk_size))
        if metachunk_nb == 0:
            metachunk_nb = 1  # special case for empty content

        if stgpol == "THREECOPIES":
            nb_copy = 3
        elif stgpol == "TWOCOPIES":
            nb_copy = 2
        elif stgpol == "SINGLE":
            nb_copy = 1

        self.assertEqual(len(chunks), metachunk_nb * nb_copy)

        for pos in range(metachunk_nb):
            chunks_at_pos = chunks.filter(pos=pos)
            self.assertEqual(len(chunks_at_pos), nb_copy)

            data_begin = pos * self.chunk_size
            data_end = pos * self.chunk_size + self.chunk_size
            chunk_hash = md5_data(data[data_begin:data_end])

            for chunk in chunks_at_pos:
                meta, stream = self.blob_client.chunk_get(chunk.url)
                self.assertEqual(md5_stream(stream), chunk_hash)
                self.assertEqual(meta['content_size'], str(len(data)))
                self.assertEqual(meta['content_path'], "titi")
                self.assertEqual(meta['content_cid'], self.container_id)
                self.assertEqual(meta['content_id'], content.content_id)
                self.assertEqual(meta['chunk_id'], chunk.id)
                self.assertEqual(meta['chunk_pos'], str(pos))
                self.assertEqual(meta['chunk_hash'], chunk_hash)

    def test_twocopies_upload_0_byte(self):
        self._test_upload("TWOCOPIES", 0)

    def test_twocopies_upload_1_byte(self):
        self._test_upload("TWOCOPIES", 1)

    def test_twocopies_upload_chunksize_bytes(self):
        self._test_upload("TWOCOPIES", self.chunk_size)

    def test_twocopies_upload_chunksize_plus_1_bytes(self):
        self._test_upload("TWOCOPIES", self.chunk_size + 1)

    def test_single_upload_0_byte(self):
        self._test_upload("SINGLE", 0)

    def test_single_upload_chunksize_plus_1_bytes(self):
        self._test_upload("SINGLE", self.chunk_size + 1)

    def test_chunks_cleanup_when_upload_failed(self):
        data = random_data(2 * self.chunk_size)
        content = self.content_factory.new(self.container_id, "titi",
                                           len(data), "TWOCOPIES")
        self.assertEqual(type(content), DupContent)

        # set bad url for position 1
        for chunk in content.chunks.filter(pos=1):
            chunk.url = "http://127.0.0.1:9/DEADBEEF"
# ... part of the code omitted ...
Example 3: TestRainContent
# Required import: from oio.blob.client import BlobClient [as alias]
# Or: from oio.blob.client.BlobClient import chunk_get [as alias]
class TestRainContent(BaseTestCase):
    def setUp(self):
        super(TestRainContent, self).setUp()

        if len(self.conf['rawx']) < 12:
            self.skipTest("Not enough rawx. "
                          "Rain tests needs more than 12 rawx to run")

        self.namespace = self.conf['namespace']
        self.account = self.conf['account']
        self.chunk_size = self.conf['chunk_size']
        self.gridconf = {"namespace": self.namespace}
        self.content_factory = ContentFactory(self.gridconf)
        self.container_client = ContainerClient(self.gridconf)
        self.blob_client = BlobClient()
        self.container_name = "TestRainContent%f" % time.time()
        self.container_client.container_create(acct=self.account,
                                               ref=self.container_name)
        self.container_id = cid_from_name(self.account,
                                          self.container_name).upper()

    def tearDown(self):
        super(TestRainContent, self).tearDown()
    def _test_upload(self, data_size):
        data = random_data(data_size)
        content = self.content_factory.new(self.container_id, "titi",
                                           len(data), "RAIN")
        k = 6
        m = 2
        self.assertEqual(type(content), RainContent)

        content.upload(StringIO.StringIO(data))

        meta, chunks = self.container_client.content_show(
            cid=self.container_id, content=content.content_id)
        chunks = ChunksHelper(chunks)
        self.assertEqual(meta['hash'], md5_data(data))
        self.assertEqual(meta['length'], str(len(data)))
        self.assertEqual(meta['policy'], "RAIN")
        self.assertEqual(meta['name'], "titi")

        metachunk_nb = int(math.ceil(float(len(data)) / self.chunk_size))
        if metachunk_nb == 0:
            metachunk_nb = 1  # special case for empty content

        nb_chunks_min = metachunk_nb * (1 + m)
        nb_chunks_max = metachunk_nb * (k + m)
        self.assertGreaterEqual(len(chunks), nb_chunks_min)
        self.assertLessEqual(len(chunks), nb_chunks_max)

        for metapos in range(metachunk_nb):
            chunks_at_pos = content.chunks.filter(metapos=metapos)
            data_chunks_at_pos = chunks_at_pos.filter(is_parity=False)
            parity_chunks_at_pos = chunks_at_pos.filter(is_parity=True)

            self.assertEquals(len(data_chunks_at_pos) >= 1, True)
            self.assertEquals(len(data_chunks_at_pos) <= k, True)
            self.assertEqual(len(parity_chunks_at_pos), m)

            for chunk in chunks_at_pos:
                meta, stream = self.blob_client.chunk_get(chunk.url)
                self.assertEqual(md5_stream(stream), chunk.hash)
                self.assertEqual(meta['content_size'], str(len(data)))
                self.assertEqual(meta['content_path'], "titi")
                self.assertEqual(meta['content_cid'], self.container_id)
                self.assertEqual(meta['content_id'], content.content_id)
                self.assertEqual(meta['chunk_id'], chunk.id)
                self.assertEqual(meta['chunk_pos'], chunk.pos)
                self.assertEqual(meta['chunk_hash'], chunk.hash)

            data_begin = metapos * self.chunk_size
            data_end = metapos * self.chunk_size + self.chunk_size
            target_metachunk_hash = md5_data(data[data_begin:data_end])

            metachunk_hash = hashlib.md5()
            for chunk in data_chunks_at_pos:
                meta, stream = self.blob_client.chunk_get(chunk.url)
                for d in stream:
                    metachunk_hash.update(d)
            self.assertEqual(metachunk_hash.hexdigest().upper(),
                             target_metachunk_hash)

    def test_upload_0_byte(self):
        self._test_upload(0)

    def test_upload_1_byte(self):
        self._test_upload(1)

    def test_upload_chunksize_bytes(self):
        self._test_upload(self.chunk_size)

    def test_upload_chunksize_plus_1_bytes(self):
        self._test_upload(self.chunk_size + 1)

    def test_chunks_cleanup_when_upload_failed(self):
        data = random_data(2 * self.chunk_size)
        content = self.content_factory.new(self.container_id, "titi",
                                           len(data), "RAIN")
        self.assertEqual(type(content), RainContent)
# ... part of the code omitted ...
Example 4: TestRebuilderCrawler
# Required import: from oio.blob.client import BlobClient [as alias]
# Or: from oio.blob.client.BlobClient import chunk_get [as alias]
class TestRebuilderCrawler(BaseTestCase):
    def setUp(self):
        super(TestRebuilderCrawler, self).setUp()

        self.namespace = self.conf['namespace']
        self.account = self.conf['account']

        self.gridconf = {"namespace": self.namespace}
        self.container_client = ContainerClient(self.gridconf)
        self.blob_client = BlobClient()

        self.container_name = "TestRebuilderCrawler%d" % int(time.time())
        self.container_client.container_create(acct=self.account,
                                               ref=self.container_name)

    def _push_content(self, content):
        for c in content.chunks:
            self.blob_client.chunk_put(c.url, c.get_create_xattr(), c.data)

        self.container_client.content_create(acct=content.account,
                                             ref=content.container_name,
                                             path=content.content_name,
                                             size=content.size,
                                             checksum=content.hash,
                                             content_id=content.content_id,
                                             stgpol=content.stgpol,
                                             data=content.get_create_meta2())

    def tearDown(self):
        super(TestRebuilderCrawler, self).tearDown()
    def test_rebuild_chunk(self):
        # push a new content
        content = TestContent(self.conf, self.account,
                              self.container_name, "mycontent", "TWOCOPIES")
        data = "azerty"
        content.add_chunk(data, pos='0', rawx=0)
        content.add_chunk(data, pos='0', rawx=1)

        self._push_content(content)

        # rebuild the first rawx
        rebuilder = BlobRebuilderWorker(self.gridconf, None,
                                        self.conf['rawx'][0]['addr'])

        rebuilder.chunk_rebuild(content.container_id, content.content_id,
                                content.chunks[0].id)

        # check meta2 information
        _, res = self.container_client.content_show(acct=content.account,
                                                    ref=content.container_name,
                                                    content=content.content_id)

        new_chunk_info = None
        for c in res:
            if (c['url'] != content.chunks[0].url and
                    c['url'] != content.chunks[1].url):
                new_chunk_info = c

        new_chunk_id = new_chunk_info['url'].split('/')[-1]

        self.assertEqual(new_chunk_info['hash'], content.chunks[0].hash)
        self.assertEqual(new_chunk_info['pos'], content.chunks[0].pos)
        self.assertEqual(new_chunk_info['size'], content.chunks[0].size)

        # check chunk information
        meta, stream = self.blob_client.chunk_get(new_chunk_info['url'])

        self.assertEqual(meta['content_size'], str(content.chunks[0].size))
        self.assertEqual(meta['content_path'], content.content_name)
        self.assertEqual(meta['content_cid'], content.container_id)
        self.assertEqual(meta['content_id'], content.content_id)
        self.assertEqual(meta['chunk_id'], new_chunk_id)
        self.assertEqual(meta['chunk_pos'], content.chunks[0].pos)
        self.assertEqual(meta['content_version'], content.version)
        self.assertEqual(meta['chunk_hash'], content.chunks[0].hash)

        self.assertEqual(stream.next(), content.chunks[0].data)

        # check rtime flag in rdir
        rdir_client = RdirClient(self.gridconf)
        res = rdir_client.chunk_fetch(self.conf['rawx'][0]['addr'])
        key = (content.container_id, content.content_id, content.chunks[0].id)
        for i_container, i_content, i_chunk, i_value in res:
            if (i_container, i_content, i_chunk) == key:
                check_value = i_value

        self.assertIsNotNone(check_value.get('rtime'))

    @unittest.skipIf(len(get_config()['rawx']) != 3,
                     "The number of rawx must be 3")
    def test_rebuild_no_spare(self):
        # push a new content
        content = TestContent(self.conf, self.account,
                              self.container_name, "mycontent", "THREECOPIES")
        data = "azerty"
        content.add_chunk(data, pos='0', rawx=0)
        content.add_chunk(data, pos='0', rawx=1)
        content.add_chunk(data, pos='0', rawx=2)
# ... part of the code omitted ...
Example 5: TestECContent
# Required import: from oio.blob.client import BlobClient [as alias]
# Or: from oio.blob.client.BlobClient import chunk_get [as alias]
class TestECContent(BaseTestCase):
    def setUp(self):
        super(TestECContent, self).setUp()

        if len(self.conf['services']['rawx']) < 12:
            self.skipTest("Not enough rawx. "
                          "EC tests needs at least 12 rawx to run")

        self.namespace = self.conf['namespace']
        self.account = self.conf['account']
        self.chunk_size = self.conf['chunk_size']
        self.gridconf = {"namespace": self.namespace}
        self.content_factory = ContentFactory(self.gridconf)
        self.container_client = ContainerClient(self.gridconf)
        self.blob_client = BlobClient()
        self.container_name = "TestECContent%f" % time.time()
        self.container_client.container_create(acct=self.account,
                                               ref=self.container_name)
        self.container_id = cid_from_name(self.account,
                                          self.container_name).upper()
        self.content = random_str(64)
        self.stgpol = "EC"
        self.size = 1024 * 1024 + 320
        self.k = 6
        self.m = 3

    def tearDown(self):
        super(TestECContent, self).tearDown()

    def random_chunks(self, nb):
        l = random.sample(xrange(self.k + self.m), nb)
        return ["0.%s" % i for i in l]
    def _test_create(self, data_size):
        # generate random test data
        data = random_data(data_size)
        # using factory create new EC content
        content = self.content_factory.new(
            self.container_id, self.content, len(data), self.stgpol)
        # verify the factory gave us an ECContent
        self.assertEqual(type(content), ECContent)

        # perform the content creation
        content.create(StringIO(data))

        meta, chunks = self.container_client.content_show(
            cid=self.container_id, content=content.content_id)
        # verify metadata
        chunks = ChunksHelper(chunks)
        self.assertEqual(meta['hash'], md5_data(data))
        self.assertEqual(meta['length'], str(len(data)))
        self.assertEqual(meta['policy'], self.stgpol)
        self.assertEqual(meta['name'], self.content)

        metachunk_nb = int(math.ceil(float(len(data)) / self.chunk_size)) \
            if len(data) != 0 else 1

        # verify each metachunk
        for metapos in range(metachunk_nb):
            chunks_at_pos = content.chunks.filter(metapos=metapos)
            for chunk in chunks_at_pos:
                meta, stream = self.blob_client.chunk_get(chunk.url)
                self.assertEqual(meta['metachunk_size'], str(chunk.size))
                self.assertEqual(meta['metachunk_hash'], chunk.checksum)
                self.assertEqual(meta['content_path'], self.content)
                self.assertEqual(meta['container_id'], self.container_id)
                self.assertEqual(meta['content_id'], content.content_id)
                self.assertEqual(meta['chunk_id'], chunk.id)
                self.assertEqual(meta['chunk_pos'], chunk.pos)
                self.assertEqual(meta['chunk_hash'], md5_stream(stream))

    def test_create_0_byte(self):
        self._test_create(0)

    def test_create_1_byte(self):
        self._test_create(1)

    def test_create(self):
        self._test_create(DAT_LEGIT_SIZE)

    def _test_rebuild(self, data_size, broken_pos_list):
        # generate test data
        data = os.urandom(data_size)
        # create initial content
        old_content = self.content_factory.new(
            self.container_id, self.content, len(data), self.stgpol)
        # verify factory work as intended
        self.assertEqual(type(old_content), ECContent)

        # perform initial content creation
        old_content.create(StringIO(data))

        uploaded_content = self.content_factory.get(self.container_id,
                                                    old_content.content_id)

        # break the content
        old_info = {}
        for pos in broken_pos_list:
            old_info[pos] = {}
# ... part of the code omitted ...