本文整理汇总了Python中cPickle.Unpickler.find_global方法的典型用法代码示例。如果您正苦于以下问题:Python Unpickler.find_global方法的具体用法?Python Unpickler.find_global怎么用?Python Unpickler.find_global使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类cPickle.Unpickler
的用法示例。
在下文中一共展示了Unpickler.find_global方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: unpickle
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def unpickle(file_path=None):
    """Load pickled Python objects from a file.

    Almost like ``cPickle.load(open(file_path))``, but also loads objects
    saved with older versions of Eelbrain (via the ``map_paths``
    ``find_global`` hook), and allows using a system file dialog to
    select a file.

    Parameters
    ----------
    file_path : None | str
        Path to a pickled file. If None (default), a system file dialog will
        be shown. If the user cancels the file dialog, a RuntimeError is
        raised.

    Returns
    -------
    object
        The unpickled object.
    """
    if file_path is None:
        filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
        file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
                                "file to unpickle", filetypes)
        if file_path is False:
            raise RuntimeError("User canceled")
        else:
            # Echo the chosen path so the user can reproduce the call.
            print(repr(file_path))
    else:
        file_path = os.path.expanduser(file_path)
        if not os.path.exists(file_path):
            # Fall back to the conventional '.pickled' extension when the
            # bare path does not exist.
            new_path = os.extsep.join((file_path, 'pickled'))
            if os.path.exists(new_path):
                file_path = new_path

    # BUG FIX: pickle data is binary; the original opened the file in text
    # mode ('r'), which corrupts protocol >= 1 pickles on Windows.
    with open(file_path, 'rb') as fid:
        unpickler = Unpickler(fid)
        # Remap legacy module paths from older Eelbrain versions.
        unpickler.find_global = map_paths
        obj = unpickler.load()
    return obj
示例2: load_state
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def load_state(self, state):
    """Load an image_set_list's state from the string returned from save_state.

    Resets the image-set caches, then reads from ``state`` in order: the
    image set count, one key per image set, and the legacy-fields value.
    Unpickling is restricted to numpy classes for security.
    """
    self.__image_sets = []
    self.__image_sets_by_key = {}
    # Make a safe unpickler that refuses to load anything but numpy types.
    p = Unpickler(StringIO(state))

    def find_global(module_name, class_name):
        logger.debug("Pickler wants %s:%s", module_name, class_name)
        if module_name not in ("numpy", "numpy.core.multiarray"):
            logger.critical(
                "WARNING WARNING WARNING - your batch file has asked to load %s.%s."
                " If this looks in any way suspicious please contact us at www.cellprofiler.org",
                module_name,
                class_name,
            )
            # BUG FIX: the original passed the format string and the tuple
            # as two separate ValueError arguments, so the message was
            # never interpolated. Format it explicitly instead.
            raise ValueError("Illegal attempt to unpickle class %s.%s"
                             % (module_name, class_name))
        __import__(module_name)
        mod = sys.modules[module_name]
        return getattr(mod, class_name)

    p.find_global = find_global
    count = p.load()
    all_keys = [p.load() for i in range(count)]
    self.__legacy_fields = p.load()
    #
    # Have to do in this order in order for the image set's
    # legacy_fields property to hook to the right legacy_fields
    #
    for i in range(count):
        self.get_image_set(all_keys[i])
示例3: _unpickle
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def _unpickle(pickled):
    """Unpickle a job payload, raising only NotReadableJobError on failure.

    OpenERP stores text fields as 'utf-8', which is how the payload arrives.
    ``load()`` may raise many exception types (AttributeError, IndexError,
    TypeError, KeyError, ...); all of them are caught and re-raised as
    ``NotReadableJobError``.

    Because a pickle written by an attacker could run arbitrary code when
    loaded, a custom ``find_global`` is installed on the ``Unpickler``:
    only jobs and a whitelist of classes/functions (plus builtin types)
    are allowed to be unpickled.
    """
    def restricted_find_global(mod_name, fn_name):
        # Resolve the requested global, then vet it against the whitelist.
        __import__(mod_name)
        candidate = getattr(sys.modules[mod_name], fn_name)
        if candidate not in JOB_REGISTRY and candidate not in _UNPICKLE_WHITELIST:
            raise UnpicklingError(
                '{}.{} is not allowed in jobs'.format(mod_name, fn_name)
            )
        return candidate

    restricted_unpickler = Unpickler(StringIO(pickled))
    restricted_unpickler.find_global = restricted_find_global
    try:
        return restricted_unpickler.load()
    except (StandardError, UnpicklingError):
        raise NotReadableJobError('Could not unpickle.', pickled)
示例4: got_data
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def got_data(self, port_agent_packet):
    """Handle incoming data from the instrument connection.

    Unpickles the packet payload (class unpickling disabled for security)
    and publishes every particle produced from it.
    """
    # Pull payload and metadata off the port agent packet, in the same
    # order as before.
    packet_length = port_agent_packet.get_data_length()
    packet_data = port_agent_packet.get_data()
    packet_timestamp = port_agent_packet.get_timestamp()

    log.debug("Got Data: %s" % packet_data)
    log.debug("Add Port Agent Timestamp: %s" % packet_timestamp)

    record_unpickler = Unpickler(StringIO(packet_data))
    # Disable class unpickling, for security; the record should be all
    # built-in types. Note this only works with cPickle.
    record_unpickler.find_global = None

    # pkt is an antelope.Pkt.Packet object converted to a dict. Refer to
    # the documentation for the Antelope Python bindings for complete
    # details.
    pkt = record_unpickler.load()
    for particle in self._particle_factory(pkt, packet_timestamp):
        self._publish_particle(particle)
示例5: getNewState
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def getNewState(self, file):
    """Unpickle a state object from ``file``, remapping stale module paths."""
    # Would like to do load(file) here... but it doesn't work with
    # universal line endings, see Python bug 1724366
    from cStringIO import StringIO
    buffered = StringIO(file.read())
    loader = Unpickler(buffered)
    # Magic to keep us backward compatible in the face of packages changing...
    loader.find_global = self.findGlobal
    return loader.load()
示例6: state
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def state(self, oid, serial, prfactory, p=''):
    """Return the unpickled state of object ``oid`` at ``serial``.

    If ``p`` is empty, the pickle is fetched via ``loadSerial``.
    """
    record = p if p else self.loadSerial(oid, serial)
    loader = Unpickler(StringIO(record))
    loader.find_global = find_global
    loader.persistent_load = prfactory.persistent_load
    loader.load()  # skip the class tuple
    return loader.load()
示例7: state
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def state(self, oid, serial, prfactory, p=""):
    """Return the unpickled state of object ``oid`` at ``serial``.

    If ``p`` is empty, the pickle is fetched via ``loadSerial``; either
    way it is first run through ``_crs_untransform_record_data``.
    """
    record = p if p else self.loadSerial(oid, serial)
    record = self._crs_untransform_record_data(record)
    loader = Unpickler(StringIO(record))
    loader.find_global = find_global
    loader.persistent_load = prfactory.persistent_load
    loader.load()  # skip the class tuple
    return loader.load()
示例8: tryToResolveConflict
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
                         committedData=''):
    """Attempt application-level resolution of a write conflict on ``oid``.

    Unpickles the new record, loads the old and committed states, and asks
    the object's ``_p_resolveConflict`` to merge them. Returns the resolved
    record (re-pickled and transformed); raises ConflictError when
    resolution is not possible.
    """
    # class_tuple, old, committed, newstate = ('',''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        newpickle = self._crs_untransform_record_data(newpickle)
        file = StringIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.find_global = find_global
        unpickler.persistent_load = prfactory.persistent_load
        # First load is the class metadata: either a bare class or a
        # (class, newargs) tuple, where class may itself be a
        # (module, name) tuple to be resolved via find_global.
        meta = unpickler.load()
        if isinstance(meta, tuple):
            klass = meta[0]
            newargs = meta[1] or ()
            if isinstance(klass, tuple):
                klass = find_global(*klass)
        else:
            klass = meta
            newargs = ()
        # Classes already known to lack _p_resolveConflict are cached in
        # _unresolvable so we can bail out without instantiating.
        if klass in _unresolvable:
            raise ConflictError
        # Second load is the object's state.
        newstate = unpickler.load()
        inst = klass.__new__(klass, *newargs)
        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            _unresolvable[klass] = 1
            raise ConflictError
        old = state(self, oid, oldSerial, prfactory)
        committed = state(self, oid, committedSerial, prfactory, committedData)
        resolved = resolve(old, committed, newstate)
        # Re-pickle the (unchanged) metadata plus the resolved state.
        file = StringIO()
        pickler = Pickler(file,1)
        pickler.inst_persistent_id = persistent_id
        pickler.dump(meta)
        pickler.dump(resolved)
        return self._crs_transform_record_data(file.getvalue(1))
    except (ConflictError, BadClassName):
        # Expected failure modes: fall through to the final ConflictError.
        pass
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client. The error here will mask
        # the original ConflictError. A client can recover from a
        # ConflictError, but not necessarily from other errors. But log
        # the error so that any problems can be fixed.
        logger.error("Unexpected error", exc_info=True)
    raise ConflictError(oid=oid, serials=(committedSerial, oldSerial),
                        data=newpickle)
示例9: load
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def load(self, file):
    """Unpickle a file, restricting globals via ``self.findGlobal``."""
    restricted_loader = Unpickler(file)
    restricted_loader.find_global = self.findGlobal
    return restricted_loader.load()
示例10: import_during_commit
# 需要导入模块: from cPickle import Unpickler [as 别名]
# 或者: from cPickle.Unpickler import find_global [as 别名]
def import_during_commit(self, transaction, f, return_oid_list):
    """Import data during two-phase commit.

    Invoked by the transaction manager mid commit.
    Appends one item, the OID of the first object created,
    to return_oid_list.
    """
    # Maps OIDs from the export file to freshly allocated storage OIDs
    # (or (oid, klass) tuples when a ghost class was supplied).
    oids = {}

    # IMPORTANT: This code should be consistent with the code in
    # serialize.py. It is currently out of date and doesn't handle
    # weak references.

    def persistent_load(ooid):
        """Remap a persistent id to a new ID and create a ghost for it."""
        klass = None
        if isinstance(ooid, tuple):
            ooid, klass = ooid
        if ooid in oids:
            oid = oids[ooid]
        else:
            if klass is None:
                oid = self._storage.new_oid()
            else:
                oid = self._storage.new_oid(), klass
            oids[ooid] = oid
        return Ghost(oid)

    while 1:
        # Each record starts with a 16-byte header: 8-byte old OID +
        # 8-byte big-endian data length; the export stream ends with a
        # sentinel marker.
        header = f.read(16)
        if header == export_end_marker:
            break
        if len(header) != 16:
            raise ExportError("Truncated export file")

        # Extract header information
        ooid = header[:8]
        length = u64(header[8:16])
        data = f.read(length)

        if len(data) != length:
            raise ExportError("Truncated export file")

        # First record (oids still empty): allocate the root OID and
        # report it to the caller. Later records look up OIDs already
        # assigned by persistent_load.
        if oids:
            oid = oids[ooid]
            if isinstance(oid, tuple):
                oid = oid[0]
        else:
            oids[ooid] = oid = self._storage.new_oid()
            return_oid_list.append(oid)

        # Blob support
        blob_begin = f.read(len(blob_begin_marker))
        if blob_begin == blob_begin_marker:
            # Copy the blob data to a temporary file
            # and remember the name
            blob_len = u64(f.read(8))
            # NOTE(review): mktemp() is race-prone; a mkstemp()-style API
            # would be safer — confirm against the storage's blob handling.
            blob_filename = mktemp()
            blob_file = open(blob_filename, "wb")
            cp(f, blob_file, blob_len)
            blob_file.close()
        else:
            # Not a blob record: rewind past the probe bytes.
            f.seek(-len(blob_begin_marker),1)
            blob_filename = None

        # Re-pickle the record so persistent references point at the new
        # OIDs; find_broken_global presumably tolerates missing classes.
        pfile = StringIO(data)
        unpickler = Unpickler(pfile)
        unpickler.persistent_load = persistent_load
        unpickler.find_global = find_broken_global

        newp = StringIO()
        pickler = Pickler(newp, 1)
        pickler.inst_persistent_id = persistent_id

        # Two loads/dumps: class metadata, then object state.
        pickler.dump(unpickler.load())
        pickler.dump(unpickler.load())
        data = newp.getvalue()

        if blob_filename is not None:
            self._storage.storeBlob(oid, None, data, blob_filename,
                                    '', transaction)
        else:
            self._storage.store(oid, None, data, '', transaction)