本文整理汇总了Python中cPickle.Unpickler类的典型用法代码示例。如果您正苦于以下问题:Python Unpickler类的具体用法?Python Unpickler怎么用?Python Unpickler使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Unpickler类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: zodb_unpickle
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB.

    ZODB records hold two consecutive pickles: class metadata first,
    then the instance state.  The class metadata is either
    ``(modname, klassname)`` or ``((modname, klassname), args)``;
    the constructor args, when present, are ignored by this helper.

    Raises KeyError if the class cannot be found in its module, and
    ValueError if the metadata has an unexpected shape.
    """
    f = StringIO(data)
    u = Unpickler(f)
    klass_info = u.load()
    if isinstance(klass_info, tuple):
        if isinstance(klass_info[0], tuple):
            # ((modname, klassname), args) form -- args ignored.
            modname, klassname = klass_info[0]
        else:
            modname, klassname = klass_info
        if modname == "__main__":
            ns = globals()
        else:
            mod = import_helper(modname)
            ns = mod.__dict__
        try:
            klass = ns[klassname]
        except KeyError:
            # The original logged here and then crashed with a
            # NameError on the next line; log and re-raise instead.
            sys.stderr.write("can't find %s in %s" % (klassname,
                                                      repr(ns)))
            raise
        inst = klass()
    else:
        raise ValueError("expected class info: %s" % repr(klass_info))
    state = u.load()
    inst.__setstate__(state)
    return inst
示例2: network_setup
def network_setup(self):
    """Synchronise local per-user data with the network copy, then
    load it, prune entries older than last month, and merge it into
    ``self.freetime``.

    If the network is reachable, upload/download whichever side is
    newer; if neither network nor local file exists, create an empty
    local file stamped with mtime 0 so a later sync pulls the remote.
    """
    data = {}  # renamed from `dict`, which shadowed the builtin
    self.net_mtime = self.network_connect()
    if self.net_mtime is not None:
        if os.path.exists(self.user_data_file):
            self.network_update()
            local_mtime = int(os.stat(self.user_data_file).st_mtime)
            if local_mtime > self.net_mtime:
                self.network_upload()
            elif local_mtime < self.net_mtime:
                self.network_download()
        else:
            self.network_download()
    if os.path.exists(self.user_data_file):
        ph = open(self.user_data_file)
        try:
            # Stored bundle is a sequence whose last item is the data dict.
            data = Unpickler(ph).load()[-1]
        finally:
            # The original leaked this handle; always close it.
            ph.close()
    if not os.path.exists(self.user_data_file) and self.net_mtime is None:
        ph = open(self.user_data_file, 'w+')
        Pickler(ph).dump(data)
        ph.close()
        # mtime 0 marks the file as a placeholder to be replaced on sync.
        os.utime(self.user_data_file, (0, 0))
    last_month = dateDelta(date.today()).get_last_month()
    # Keys start with 'YYYY-MM'; drop everything older than last month.
    for key in sorted(data.keys()):
        if key[:7] < '%0.4d-%0.2d' % (last_month.year, last_month.month):
            data.pop(key)
        else:
            break
    self.freetime.update(data)
示例3: setstate
def setstate(self, object):
    """
    Unlike the 'stock' Connection class' setstate, this method
    doesn't raise ConflictErrors. This is potentially dangerous
    for applications that need absolute consistency, but
    sessioning is not one of those.
    """
    oid=object._p_oid
    invalid = self._invalid
    if invalid(None):
        # only raise a conflict if there was
        # a mass invalidation, but not if we see this
        # object's oid as invalid
        raise ConflictError, `oid`
    # Load the raw record for this oid from the underlying storage.
    p, serial = self._storage.load(oid, self._version)
    file=StringIO(p)
    unpickler=Unpickler(file)
    unpickler.persistent_load=self._persistent_load
    # A ZODB record is two pickles back to back: class metadata,
    # then the instance state.  Discard the first, keep the second.
    unpickler.load()
    state = unpickler.load()
    if hasattr(object, '__setstate__'):
        object.__setstate__(state)
    else:
        # No __setstate__ hook: copy state items straight into __dict__.
        d=object.__dict__
        for k,v in state.items(): d[k]=v
    object._p_serial=serial
示例4: network_update
def network_update(self):
    """Reconcile the local per-user data file with its remote copy.

    Preconditions: the network is up and the local file exists; the
    remote file may or may not exist yet.  A local mtime of 0 marks a
    placeholder file, in which case the remote data is pulled and
    merged over the local data; otherwise, if no remote copy exists,
    the local file is uploaded and its mtime synced to the remote's.
    """
    remote_name = self.category + '/' + self.userkey
    if self.userkey in self.sftp.listdir(self.category):
        if int(os.stat(self.user_data_file).st_mtime) == 0:
            # Placeholder local file: fetch the remote bundle into a
            # temp file and merge its data over ours.
            tmp_path = self.user_data_file + '.tmp'
            self.sftp.get(remote_name, tmp_path)
            handle = open(tmp_path)
            net_data = Unpickler(handle).load()[-1]
            handle.close()
            os.unlink(tmp_path)
            handle = open(self.user_data_file)
            local_data = Unpickler(handle).load()[-1]
            handle.close()
            local_data.update(net_data)
            self.freetime = freeTime(dict=local_data)
            handle = open(self.user_data_file, 'w+')
            Pickler(handle).dump(self.bundle_data())
            handle.close()
    else:
        # No remote copy yet: publish ours and mirror the remote mtime
        # locally so future comparisons line up.
        self.sftp.put(self.user_data_file, remote_name)
        mtime = self.sftp.stat(remote_name).st_mtime
        os.utime(self.user_data_file, (mtime, mtime))
示例5: _cloneByPickle
def _cloneByPickle(self, obj):
    """Return a deep copy of a ZODB object, loading ghosts as needed.

    Returns a 4-tuple ``(approxSize, clone, inside_orefs,
    outside_orefs)`` where approxSize is the pickle's byte size and
    the oref tuples come from the portal_modifier's clone callbacks
    (empty when no callbacks are registered).
    """
    modifier = getToolByName(self, 'portal_modifier')
    callbacks = modifier.getOnCloneModifiers(obj)
    if callbacks is None:
        inside_orefs, outside_orefs = (), ()
    else:
        pers_id, pers_load, inside_orefs, outside_orefs = callbacks[:4]
    stream = StringIO()
    pickler = Pickler(stream, 1)
    if callbacks is not None:
        pickler.persistent_id = pers_id
    cmf_uid = getattr(obj, 'cmf_uid', None)
    if IUniqueIdAnnotation.providedBy(cmf_uid):
        # Replace the annotation object with its plain uid value.
        setattr(obj, 'cmf_uid', cmf_uid())
    try:
        pickler.dump(aq_base(obj))
    except TypeError:
        # Retrying the dump sometimes succeeds where the first
        # attempt failed; deliberate workaround kept from the
        # original ("WTF?").
        pickler.dump(aq_base(obj))
    approxSize = stream.tell()
    stream.seek(0)
    unpickler = Unpickler(stream)
    if callbacks is not None:
        unpickler.persistent_load = pers_load
    return approxSize, unpickler.load(), inside_orefs, outside_orefs
示例6: zodb_unpickle
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB.

    The record holds two pickles: class metadata, then instance
    state.  Metadata is either ``(class, args)`` with empty args,
    ``((modname, klassname), args)``, or ``(modname, klassname)``.

    Raises KeyError when the named class is missing from its module,
    ValueError when the metadata has an unexpected shape.
    """
    f = StringIO(data)
    u = Unpickler(f)
    u.persistent_load = persistent_load
    klass_info = u.load()
    if isinstance(klass_info, tuple):
        if isinstance(klass_info[0], type):
            # Newer format: (class, args); args is expected to be empty.
            klass, args = klass_info
            assert not args
        else:
            if isinstance(klass_info[0], tuple):
                modname, klassname = klass_info[0]
            else:
                modname, klassname = klass_info
            if modname == "__main__":
                ns = globals()
            else:
                mod = import_helper(modname)
                ns = mod.__dict__
            try:
                klass = ns[klassname]
            except KeyError:
                # The original logged and then fell through to
                # klass(), dying with a NameError; re-raise instead.
                print >> sys.stderr, "can't find %s in %r" % (klassname, ns)
                raise
        inst = klass()
    else:
        raise ValueError("expected class info: %s" % repr(klass_info))
    state = u.load()
    inst.__setstate__(state)
    return inst
示例7: unpickle
def unpickle(file_path=None):
"""Load pickled Python objects from a file.
Almost like ``cPickle.load(open(file_path))``, but also loads object saved
with older versions of Eelbrain, and allows using a system file dialog to
select a file.
Parameters
----------
file_path : None | str
Path to a pickled file. If None (default), a system file dialog will be
shown. If the user cancels the file dialog, a RuntimeError is raised.
"""
if file_path is None:
filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
"file to unpickle", filetypes)
if file_path is False:
raise RuntimeError("User canceled")
else:
print repr(file_path)
else:
file_path = os.path.expanduser(file_path)
if not os.path.exists(file_path):
new_path = os.extsep.join((file_path, 'pickled'))
if os.path.exists(new_path):
file_path = new_path
with open(file_path, 'r') as fid:
unpickler = Unpickler(fid)
unpickler.find_global = map_paths
obj = unpickler.load()
return obj
示例8: loads
def loads(self, s):
    """Deserialize *s*, resolving persistent references through
    ``self._get_object``.

    Raises UnpicklingError when a referenced Node class cannot be
    found (surfacing the underlying KeyError's message).
    """
    up = Unpickler(BytesIO(s))
    up.persistent_load = self._get_object
    try:
        return up.load()
    # `except E as e` replaces the Py2-only `except E, e` form; it is
    # valid on Python 2.6+ and required on Python 3.
    except KeyError as e:
        raise UnpicklingError("Could not find Node class for %s" % e)
示例9: deserialize
def deserialize(self, event, state):
    """Restore ``event.obj``'s instance state from *state*, a pickle
    that may be text-encoded (pound-sign prefixed) or raw binary.

    The pickle stream holds the attribute state first, optionally
    followed by a second pickle listing unmanaged subobjects.
    """
    assert IFullDeserializationEvent.isImplementedBy(event)
    assert isinstance(event.obj, Persistent)
    # Set up to resolve cyclic references to the object.
    event.deserialized('self', event.obj)
    state = state.strip()
    if state:
        if state.startswith('#'):
            # Text-encoded pickles start with a pound sign.
            # (A pound sign is not a valid pickle opcode.)
            data = decode_from_text(state)
        else:
            data = state
        infile = StringIO(data)
        u = Unpickler(infile)
        # Internal persistent references resolve through the event.
        u.persistent_load = event.resolve_internal
        s = u.load()
        if not hasattr(s, 'items'):
            # Turn the list back into a dictionary
            s_list = s
            s = {}
            for key, value in s_list:
                s[key] = value
        event.obj.__dict__.update(s)
        try:
            unmanaged = u.load()
        except EOFError:
            # old pickle with no list of unmanaged objects
            pass
        else:
            event.upos.extend(unmanaged)
示例10: deserialize
def deserialize(self, message, task_id=None):
    """Deserialize an object
    :param message: A serialized object (string).
    :param task_id: Optional id of the task whose stored arguments are
        used to resolve persistent references in the message. When
        None, any persistent reference raises UnpicklingError.
    :returns: The deserialized object, or None when a referenced
        argument was a TaskFailure and the object does not pass
        errors through (the failure is recorded as the result).
    """
    fail = []
    if task_id is None:
        def persistent_load(task_id):
            # No task context: references to external objects are illegal.
            raise UnpicklingError('message contained references to '
                'external objects: %s' % task_id)
    else:
        # Pre-load the task's stored arguments as the reference table.
        args = self._queue.get_arguments(task_id)
        args = {k: loads(v) for k, v in args.items()}
        def persistent_load(arg_id):
            value = args[arg_id]
            if isinstance(value, TaskFailure):
                # Remember failures so they can short-circuit below.
                fail.append(value)
            return value
    data = StringIO(message)
    pickle = Unpickler(data)
    pickle.persistent_load = persistent_load
    obj = pickle.load()
    if fail and not obj.on_error_pass:
        # TODO detect errors earlier, fail earlier, cancel enqueued tasks
        self.set_result(obj, fail[0])
        obj = None
    return obj
示例11: load_state
def load_state(self, state):
    """Load an image_set_list's state from the string returned from save_state"""
    self.__image_sets = []
    self.__image_sets_by_key = {}
    # Make a safe unpickler: only numpy globals may be resolved, so a
    # malicious batch file cannot instantiate arbitrary classes.
    p = Unpickler(StringIO(state))
    def find_global(module_name, class_name):
        logger.debug("Pickler wants %s:%s", module_name, class_name)
        if module_name not in ("numpy", "numpy.core.multiarray"):
            logger.critical(
                "WARNING WARNING WARNING - your batch file has asked to load %s.%s."
                " If this looks in any way suspicious please contact us at www.cellprofiler.org",
                module_name,
                class_name,
            )
            # The original passed the args tuple as a second argument to
            # ValueError, so the message was never formatted; apply the
            # % operator so the class name appears in the error.
            raise ValueError("Illegal attempt to unpickle class %s.%s"
                             % (module_name, class_name))
        __import__(module_name)
        mod = sys.modules[module_name]
        return getattr(mod, class_name)
    p.find_global = find_global
    # Stream layout: count, then `count` key tuples, then legacy fields.
    count = p.load()
    all_keys = [p.load() for i in range(count)]
    self.__legacy_fields = p.load()
    #
    # Have to do in this order in order for the image set's
    # legacy_fields property to hook to the right legacy_fields
    #
    for i in range(count):
        self.get_image_set(all_keys[i])
示例12: cloneByPickle
def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.
    Ignores specified objects along the way, replacing them with None
    in the copy.

    :param obj: the object to deep-copy via a pickle round trip.
    :param ignore_list: objects to replace with placeholders in the copy.
    """
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o
    def persistent_id(ob, ignore_dict=ignore_dict):
        # `in` replaces dict.has_key(), which was removed in Python 3
        # and is non-idiomatic even on Python 2.
        if id(ob) in ignore_dict:
            return 'ignored'
        # Unghostify ghosted persistent objects so they pickle fully.
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None
    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder
    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
示例13: setklassstate
def setklassstate(self, object):
    """Rebuild a ZClass-style class *object* from its stored record.

    Loads the record for the object's oid, recreates the class from
    the pickled ``(klass, args)`` pair, and copies the new class's
    namespace onto the ghost in place.  Any failure is logged and
    re-raised.
    """
    try:
        oid=object._p_oid
        # Surfaced in tracebacks by Zope's error reporting.
        __traceback_info__=oid
        p, serial = self._storage.load(oid, self._version)
        file=StringIO(p)
        unpickler=Unpickler(file)
        unpickler.persistent_load=self._persistent_load
        copy = unpickler.load()
        klass, args = copy
        if klass is not ExtensionKlass:
            # Not a ZClass record: log and bail out without touching
            # the object.
            LOG('ZODB',ERROR,
                "Unexpected klass when setting class state on %s"
                % getattr(object,'__name__','(?)'))
            return
        # Recreate the class and transplant its namespace onto the
        # existing (ghost) object, preserving identity for callers.
        copy=apply(klass,args)
        object.__dict__.clear()
        object.__dict__.update(copy.__dict__)
        object._p_oid=oid
        object._p_jar=self
        object._p_changed=0
        object._p_serial=serial
    except:
        LOG('ZODB',ERROR, 'setklassstate failed', error=sys.exc_info())
        raise
示例14: __getitem__
def __getitem__(self, oid, tt=type(())):
    """Return the object for *oid*, loading and caching it from
    storage on first access.

    Raises ValueError when the stored classification pickle cannot
    be loaded; the pickled data is attached to the traceback info.
    """
    obj = self._cache.get(oid, None)
    if obj is not None:
        return obj
    __traceback_info__ = (oid)
    self.before_load()
    p, serial = self._storage.load(oid, self._version)
    __traceback_info__ = (oid, p)
    file = StringIO(p)
    unpickler = Unpickler(file)
    # unpickler.persistent_load=self._persistent_load
    try:
        classification = unpickler.load()
    except:
        # The original did `raise ("...")`, which raises a string --
        # itself a TypeError at runtime -- rather than the intended
        # message.  Raise a real exception instead.
        raise ValueError(
            "Could not load oid %s. Pickled data in traceback info may "
            "contain clues." % (oid))
    osio = self._get_osio()
    obj = osio.new_instance(oid, classification)
    assert obj is not None
    obj._p_oid = oid
    obj._p_jar = self
    # Mark as ghost; state is loaded lazily on first attribute access.
    obj._p_changed = None
    self._set_serial(obj, serial)
    self._cache[oid] = obj
    if oid == osio.conf.oid_gen.root_oid:
        self._root_ = obj  # keep a ref
    return obj
示例15: validate_result
def validate_result(result, filter_plugins = True, print_files = False):
'''Validate cached.filelist for the given result'''
path_to_report_dir = result.get_report_dir()
path_to_file = os.path.join(path_to_report_dir, 'cached.filelist')
print "validating %s" % path_to_file
if result.isThumbnail:
print("cannot validate: thumbnail")
return None
# for this_type in dmtypes.FILESET_TYPES:
# dmfs = result.get_filestat(this_type)
# if dmfs.action_state != 'L':
# print "cannot validate: not local"
# return None
dmfs = result.get_filestat(dmtypes.SIG)
# Get the cached filelist from cached.filelist file
try:
with open(path_to_file, 'rb') as fhandle:
pickle = Unpickler(fhandle)
cached_filelist = pickle.load()
except IOError as ioerror:
print "%s" % ioerror
return None
# Get a list of files on the filesystem currently
dirs = [dmfs.result.get_report_dir(), dmfs.result.experiment.expDir]
current_fs_filelist = get_walk_filelist(dirs)
# Ignore plugin_out directories
if filter_plugins:
current_fs_filelist = [filename for filename in current_fs_filelist if not '/plugin_out' in filename]
# Ignore the cached.filelist file
current_fs_filelist = [filename for filename in current_fs_filelist if not 'cached.filelist' in filename]
# Ignore the status.txt file
current_fs_filelist = [filename for filename in current_fs_filelist if not 'status.txt' in filename]
# Ignore the serialized_*.json file
current_fs_filelist = [filename for filename in current_fs_filelist if not 'serialized_' in filename]
# See if there are differences
#leftovers = list(set(cached_filelist) - set(current_fs_filelist))
#N.B. This difference here will tell us, "Of the files in the filesystem right now, how many are NOT in the cached.filelist file"
#Even if the cached.filelist contains more files than are currently on the filesystem.
#I am thinking this means we do not care if any action_state is not 'L'. It doesn't matter because we are looking for deficient
#cached.filelist.
leftovers = list(set(current_fs_filelist) - set(cached_filelist))
if print_files:
for i, item in enumerate(leftovers):
if not i: print "FILES MISSING FROM CACHED.FILELIST:"
print item
else:
if len(leftovers) > 0:
print "FILES MISSING FROM CACHED.FILELIST: %d" % len(leftovers)
print "- %s\n" % ("Not valid" if len(leftovers) > 0 else "Valid")
return None