本文整理汇总了Python中ubuntuone.storageprotocol.content_hash.content_hash_factory函数的典型用法代码示例。如果您正苦于以下问题:Python content_hash_factory函数的具体用法?Python content_hash_factory怎么用?Python content_hash_factory使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了content_hash_factory函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_putcontent_unlinked
def test_putcontent_unlinked(self):
    """Try to put content in an unlinked file."""
    # precompute the hashes/crc for the one-byte payload
    payload = "*"
    payload_size = 1
    no_content = content_hash_factory().content_hash()
    hasher = content_hash_factory()
    hasher.update(payload)
    payload_hash = hasher.content_hash()
    payload_crc = crc32(payload)

    def auth(client):
        # authenticate, then locate the root volume
        d = client.dummy_authenticate("open sesame")
        d.addCallback(lambda r: client.get_root())
        # create a file, remember its id, and unlink it right away
        d.addCallback(lambda r: client.make_file(request.ROOT, r, "hola"))
        d.addCallback(lambda req: self._save_state("file", req.new_id))
        d.addCallback(
            lambda _: client.unlink(request.ROOT, self._state.file))
        # uploading content to the removed node must fail
        d.addCallback(lambda _: client.put_content(
            request.ROOT, self._state.file, no_content, payload_hash,
            payload_crc, payload_size, StringIO(payload)))
        d.addCallbacks(client.test_fail, lambda x: client.test_done("ok"))
    return self.callback_test(auth)
示例2: _hash
def _hash(self, path):
    """Hash the file at path.

    Returns a (content_hash, crc32, size, stat) tuple; raises
    StopHashing when this path is cancelled or the hasher is stopped.
    """
    hasher = content_hash_factory()
    crc = 0
    size = 0
    try:
        initial_stat = stat_path(path)
        with open_file(path, 'rb') as fh:
            while True:
                # copy the pending cancellation request under the lock,
                # then check it outside to keep the critical section tiny
                with self.mutex:
                    cancel_target = self._should_cancel
                if self._stopped or cancel_target == path:
                    raise StopHashing('hashing of %r was cancelled' % path)
                chunk = fh.read(self.chunk_size)
                if not chunk:
                    break
                hasher.update(chunk)
                crc = crc32(chunk, crc)
                size += len(chunk)
    finally:
        # always clear the cancellation flag, even on error
        with self.mutex:
            self._should_cancel = None
    return hasher.content_hash(), crc, size, initial_stat
示例3: test_put_content
def test_put_content(self):
    """Write a file."""
    data = "*" * 10000
    deflated = zlib.compress(data)
    hasher = content_hash_factory()
    hasher.update(data)
    data_hash = hasher.content_hash()
    data_crc = crc32(data)
    data_size = len(data)
    deflated_size = len(deflated)

    def auth(client):
        """Authenticate and test."""
        d = client.dummy_authenticate("open sesame")
        # create the UDF the file will live in, and remember it
        d.addCallback(lambda _: client.create_udf(u"~", u"myudf"))
        d.addCallback(self.save_req, "udf")
        # make an empty file inside the UDF
        d.addCallback(lambda r: client.make_file(
            self._state.udf.volume_id, self._state.udf.node_id, "foo"))
        # upload the deflated content into it
        d.addCallback(lambda req: client.put_content(
            self._state.udf.volume_id, req.new_id,
            NO_CONTENT_HASH, data_hash, data_crc, data_size,
            deflated_size, StringIO(deflated)))
        d.addCallbacks(client.test_done, client.test_fail)
    return self.callback_test(auth)
示例4: test_getcontent_file
def test_getcontent_file(self, check_file_content=True):
    """Get the content from a file."""
    data = os.urandom(300000)
    deflated = zlib.compress(data)
    hasher = content_hash_factory()
    hasher.update(data)
    data_hash = hasher.content_hash()
    data_crc = crc32(data)
    data_size = len(data)
    deflated_size = len(deflated)

    def check_file(req):
        # the server must hand back exactly the deflated bytes we stored
        if req.data != deflated:
            raise Exception("data does not match")

    def auth(client):
        d = client.dummy_authenticate("open sesame")
        d.addCallbacks(lambda _: client.get_root(), client.test_fail)
        # create the file and keep its request around for later
        d.addCallbacks(
            lambda root: client.make_file(request.ROOT, root, "hola"),
            client.test_fail)
        d.addCallback(self.save_req, 'req')
        # store the deflated content
        d.addCallbacks(
            lambda mkfile_req: client.put_content(
                request.ROOT, mkfile_req.new_id, NO_CONTENT_HASH,
                data_hash, data_crc, data_size, deflated_size,
                StringIO(deflated)),
            client.test_fail)
        # fetch it back, optionally verifying the bytes
        d.addCallback(lambda _: client.get_content(
            request.ROOT, self._state.req.new_id, data_hash))
        if check_file_content:
            d.addCallback(check_file)
        d.addCallbacks(client.test_done, client.test_fail)
    return self.callback_test(auth, timeout=1.5)
示例5: test_proxy_producer_streaming
def test_proxy_producer_streaming(self):
    """Test ProxyHashingProducer."""
    data = os.urandom(1024 * 10)
    compressed = zlib.compress(data)
    storage = diskstorage.DiskStorage(os.path.join(self.tmpdir, "testfile"))
    consumer = storage.put("somenode")
    producer = upload.ProxyHashingProducer(consumer, True)
    # feed the compressed payload in small slices
    step = 10
    for offset in xrange(0, len(compressed), step):
        yield producer.dataReceived(compressed[offset:offset + step])
    producer.stopProducing()
    yield producer.flush_decompressor()
    # the consumer received the raw (still compressed) bytes...
    with open(consumer.filepath, "rb") as fh:
        self.assertEqual(fh.read(), compressed)
    # ...while the hashes/crc/size reflect the inflated data
    expected = content_hash_factory()
    expected.update(data)
    self.assertEqual(producer.hash_object.content_hash(),
                     expected.content_hash())
    expected_magic = magic_hash_factory()
    expected_magic.update(data)
    self.assertEqual(producer.magic_hash_object.content_hash()._magic_hash,
                     expected_magic.content_hash()._magic_hash)
    self.assertEqual(producer.inflated_size, len(data))
    self.assertEqual(producer.crc32, crc32(data))
示例6: test_upload
def test_upload(self):
"""Hiccup the network in the middle of an upload."""
data = os.urandom(1000)
hash_object = content_hash_factory()
hash_object.update(data)
hash_value = hash_object.content_hash()
crc32_value = crc32(data)
size = len(data)
self.patch(self.main.fs, 'open_file', lambda mdid: StringIO(data))
mdid, node_id = yield self._mkfile('hola')
def worker():
"""Async worker."""
self.aq.upload('', node_id, NO_CONTENT_HASH, hash_value,
crc32_value, size, mdid)
return self.hiccup()
d = self.wait_for_nirvana()
d.addCallback(lambda _: self.nuke_client_method(
'put_content_request', worker, lambda: self.connlost_deferred))
self.assertInQ(d, lambda: ('AQ_UPLOAD_FINISHED',
{'share_id': '',
'hash': hash_value,
'node_id': anUUID,
'new_generation': 2L}))
yield d
示例7: test_shutdown_while_hashing
def test_shutdown_while_hashing(self):
    """Test that the HashQueue is shutdown ASAP while it's hashing."""
    # a large payload so hashing takes many iterations
    payload = os.urandom(500000)
    hasher = content_hash_factory()
    hasher.hash_object.update(payload)
    testfile = os.path.join(self.test_dir, "testfile")
    with open_file(testfile, "wb") as fh:
        fh.write(payload)

    class Helper(object):
        """Helper class."""

        def push(self, event, **kwargs):
            """Callback."""

    receiver = Helper()
    hq = hash_queue.HashQueue(receiver)
    # read in small chunks: more iterations, more chances to stop early
    hq.hasher.chunk_size = 2 ** 10
    hq.insert(testfile, "mdid")
    time.sleep(0.1)
    hq.shutdown()
    # block until the hash is stopped and the queue is empty;
    # a shutdown clears the queue
    hq._queue.join()
    self.assertFalse(hq.hasher.hashing)
    self.assertTrue(hq.hasher._stopped)
    self.assertTrue(hq._queue.empty())
示例8: test_get_content_on_share
def test_get_content_on_share(self):
    """read a file on a share."""
    # empty payload: getting back EMPTY_HASH content is the point here
    data = ""
    deflated = zlib.compress(data)
    hasher = content_hash_factory()
    hasher.update(data)
    data_hash = hasher.content_hash()
    data_crc = crc32(data)
    data_size = len(data)
    deflated_size = len(deflated)

    def auth(client):
        """auth"""
        d = client.dummy_authenticate("open sesame")
        # need to put data to be able to retrieve it!
        d.addCallback(lambda r: client.put_content(
            self.share_modify, self.filerw, NO_CONTENT_HASH,
            data_hash, data_crc, data_size, deflated_size,
            StringIO(deflated)))
        d.addCallback(lambda r: client.get_content(
            self.share_modify, self.filerw, EMPTY_HASH))
        d.addCallbacks(client.test_done, client.test_fail)
    return self.callback_test(auth)
示例9: make_file_with_content
def make_file_with_content(root, name, mimetype=None):
    """Make a file with content.

    A fresh UUID is hashed so every generated file gets a distinct
    content hash; the size/deflated-size/crc arguments are fixed
    placeholder values.
    """
    # 'hasher' rather than 'hash': don't shadow the builtin
    hasher = content_hash.content_hash_factory()
    hasher.update(str(uuid.uuid4()))
    hashstr = hasher.content_hash()
    root.make_file_with_content(name, hashstr, 10, 100, 100, uuid.uuid4(),
                                mimetype=mimetype)
示例10: test_getcontent_file_slow
def test_getcontent_file_slow(self):
    """Get content from a file with very low BW and fail with timeout."""
    data = os.urandom(300000)
    deflated = zlib.compress(data)
    hasher = content_hash_factory()
    hasher.update(data)
    data_hash = hasher.content_hash()
    data_crc = crc32(data)
    data_size = len(data)
    deflated_size = len(deflated)

    @defer.inlineCallbacks
    def auth(client):
        """Test."""
        yield client.dummy_authenticate("open sesame")
        root = yield client.get_root()
        # make a file and fill it with content
        mkfile_req = yield client.make_file(request.ROOT, root, "hola")
        yield client.put_content(request.ROOT, mkfile_req.new_id,
                                 NO_CONTENT_HASH, data_hash, data_crc,
                                 data_size, deflated_size,
                                 StringIO(deflated))
        # throttle reads so the download cannot finish within the timeout
        client.factory.factory.readLimit = 1000
        yield client.get_content(request.ROOT, mkfile_req.new_id, data_hash)

    d = self.callback_test(auth, add_default_callbacks=True, timeout=0.1)
    err = yield self.assertFailure(d, Exception)
    self.assertEqual(str(err), "timeout")
示例11: __init__
def __init__(self, consumer, streaming):
    """Wrap consumer, hashing and measuring everything passed through."""
    self.consumer = consumer
    self.streaming = streaming
    # incremental hashers, fed as data is received
    self.hash_object = content_hash_factory()
    self.magic_hash_object = magic_hash_factory()
    # zlib stream used to inflate the incoming deflated data
    self.decompressor = zlib.decompressobj()
    # running totals, updated per chunk
    self.crc32 = 0
    self.inflated_size = 0
    self.deflated_size = 0
示例12: test_putcontent_slow
def test_putcontent_slow(self, num_files=1):
    """Test putting content to a file with very low bandwidth and fail
    with timeout.
    """
    # NOTE(review): num_files is accepted but never used here — TODO confirm
    # against the sibling test_putcontent, which does iterate over it.
    data = os.urandom(30000)
    deflated_data = zlib.compress(data)
    hash_object = content_hash_factory()
    hash_object.update(data)
    hash_value = hash_object.content_hash()
    crc32_value = crc32(data)
    size = len(data)
    deflated_size = len(deflated_data)
    def auth(client):
        def check_file(result):
            # Verify in a thread that the content blob reached the store.
            # NOTE(review): check_file is defined but never added to the
            # callback chain below — presumably intentional since the
            # upload is expected to time out; verify against upstream.
            def _check_file():
                filesync_tm.begin()
                try:
                    store = get_filesync_store()
                    content_blob = store.get(ContentBlob, hash_value)
                    if not content_blob:
                        raise ValueError("content blob is not there")
                finally:
                    # read-only check: always roll the transaction back
                    filesync_tm.abort()
            d = threads.deferToThread(_check_file)
            return d
        d = client.dummy_authenticate("open sesame")
        filename = "hola_1"
        d.addCallbacks(lambda _: client.get_root(), client.test_fail)
        d.addCallbacks(lambda root: client.make_file(request.ROOT, root, filename), client.test_fail)
        def set_write_limit(r):
            # throttle writes so the upload cannot finish within the timeout
            client.factory.factory.writeLimit = 100
            return r
        d.addCallback(set_write_limit)
        d.addCallbacks(
            lambda mkfile_req: client.put_content(
                request.ROOT,
                mkfile_req.new_id,
                NO_CONTENT_HASH,
                hash_value,
                crc32_value,
                size,
                deflated_size,
                StringIO(deflated_data),
            ),
            client.test_fail,
        )
        return d
    # invert the outcome: the throttled upload timing out is success
    d1 = defer.Deferred()
    test_d = self.callback_test(auth, timeout=1)
    test_d.addCallbacks(d1.errback, lambda r: d1.callback(None))
    return d1
示例13: test_unique
def test_unique(self):
    """The hasher should return in order."""
    # calculate what we should receive: one HQ_HASH_NEW event per file,
    # in insertion order
    should_be = []
    for i in range(10):
        hasher = content_hash_factory()
        text = "supercalifragilistico"+str(i)
        hasher.hash_object.update(text)
        tfile = os.path.join(self.test_dir, "tfile"+str(i))
        with open_file(tfile, "wb") as fh:
            fh.write("supercalifragilistico"+str(i))
        d = dict(path=tfile, hash=hasher.content_hash(),
                 crc32=crc32(text), size=len(text), stat=stat_path(tfile))
        should_be.append(("HQ_HASH_NEW", d))
    d = defer.Deferred()
    class Helper(object):
        """Helper class."""
        # class-closure, cannot use self, pylint: disable-msg=E0213
        def __init__(innerself):
            innerself.store = []
        def push(innerself, event, **kwargs):
            """Callback."""
            # collect events; after the 10th, compare against the
            # expected ordered list and settle the deferred
            innerself.store.append((event, kwargs))
            if len(innerself.store) == 10:
                if innerself.store == should_be:
                    d.callback(True)
                else:
                    d.errback(Exception("are different!"))
    receiver = Helper()
    hq = hash_queue.HashQueue(receiver)
    self.addCleanup(hq.shutdown)
    # stop the hasher so we can test the unique items in the queue
    hq.hasher.stop()
    self.log.debug('Hasher stopped (forced)')
    # allow the hasher to fully stop
    time.sleep(0.1)
    # create a new hasher just like the HashQueue creates it
    hq.hasher = hash_queue._Hasher(hq._queue, hq._end_mark, receiver)
    hq.hasher.setDaemon(True)
    # send to hash twice: duplicate inserts while stopped must collapse
    # into a single queued item (presumably — the 10-event expectation
    # above relies on it)
    for i in range(10):
        tfile = os.path.join(self.test_dir, "tfile"+str(i))
        hq.insert(tfile, "mdid")
        hq.insert(tfile, "mdid")
    # start the hasher
    self.log.debug('Hasher started (forced)')
    hq.hasher.start()
    # insert the last item to check the uniqueness in the queue while
    # the hasher is running
    for i in range(9, 10):
        tfile = os.path.join(self.test_dir, "tfile"+str(i))
        hq.insert(tfile, "mdid")
    return d
示例14: _get_data
def _get_data(self, data_len=1000):
    """Get the hash, crc and size of a chunk of data."""
    data = os.urandom(data_len)  # random bytes: not terribly compressible
    hasher = content_hash_factory()
    hasher.update(data)
    digest = hasher.content_hash()
    checksum = crc32(data)
    return NoCloseStringIO(data), data, digest, checksum, len(data)
示例15: test_putcontent
def test_putcontent(self, num_files=1):
    """Test putting content to a file."""
    data = os.urandom(300000)
    deflated_data = zlib.compress(data)
    hash_object = content_hash_factory()
    hash_object.update(data)
    hash_value = hash_object.content_hash()
    crc32_value = crc32(data)
    size = len(data)
    deflated_size = len(deflated_data)
    def auth(client):
        def check_file(result):
            # Verify in a thread that the content blob reached the store.
            def _check_file():
                filesync_tm.begin()
                try:
                    store = get_filesync_store()
                    content_blob = store.get(ContentBlob, hash_value)
                    if not content_blob:
                        raise ValueError("content blob is not there")
                finally:
                    # read-only check: always roll the transaction back
                    filesync_tm.abort()
            d = threads.deferToThread(_check_file)
            return d
        d = client.dummy_authenticate("open sesame")
        # an iterator (not a loop variable) supplies the file names, so
        # the lambdas below pick a fresh name at call time instead of
        # closing over a late-bound index
        filenames = iter("hola_%d" % i for i in xrange(num_files))
        for i in range(num_files):
            # per file: locate root, create the file, upload the content,
            # then check it landed in the store
            d.addCallbacks(lambda _: client.get_root(), client.test_fail)
            d.addCallbacks(lambda root: client.make_file(request.ROOT, root, filenames.next()), client.test_fail)
            d.addCallbacks(
                lambda mkfile_req: client.put_content(
                    request.ROOT,
                    mkfile_req.new_id,
                    NO_CONTENT_HASH,
                    hash_value,
                    crc32_value,
                    size,
                    deflated_size,
                    StringIO(deflated_data),
                ),
                client.test_fail,
            )
            d.addCallback(check_file)
        d.addCallbacks(client.test_done, client.test_fail)
        return d
    return self.callback_test(auth, timeout=1)