This article collects typical usage examples of the Memory class from Python's sklearn.externals.joblib module. If you are wondering what the Memory class is for or how to use it, the curated examples below should help.
The following 15 code examples of the Memory class are shown, sorted by popularity by default.
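Before diving into the examples, here is a minimal sketch of the core Memory workflow: point it at a cache directory, wrap a function with cache, and clear the cached results when done. The square function below is a made-up placeholder. Note that cachedir is the parameter name in this old, vendored copy of joblib; standalone joblib (>= 0.12) renamed it to location.

import tempfile
from sklearn.externals.joblib import Memory

mem = Memory(cachedir=tempfile.mkdtemp(), verbose=0)

@mem.cache  # results are memoized on disk, keyed by the arguments
def square(x):
    return x ** 2

square(3)    # computed and written to the cache directory
square(3)    # loaded from the cache; the function body is not re-run
mem.clear()  # remove all cached results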
Example 1: test_multilabel
def test_multilabel(self):
    # Cache the synthetic dataset generation so repeated test runs reuse it.
    cache = Memory(cachedir=tempfile.gettempdir())
    cached_func = cache.cache(
        sklearn.datasets.make_multilabel_classification
    )
    X, Y = cached_func(
        n_samples=150,
        n_features=20,
        n_classes=5,
        n_labels=2,
        length=50,
        allow_unlabeled=True,
        sparse=False,
        return_indicator=True,
        return_distributions=False,
        random_state=1
    )
    # Split into 100 training and 50 test samples.
    X_train = X[:100, :]
    Y_train = Y[:100, :]
    X_test = X[100:, :]
    Y_test = Y[100:, :]
    data = {'X_train': X_train, 'Y_train': Y_train,
            'X_test': X_test, 'Y_test': Y_test}
    dataset_properties = {'multilabel': True}
    cs = SimpleClassificationPipeline(
        dataset_properties=dataset_properties
    ).get_hyperparameter_search_space()
    self._test_configurations(configurations_space=cs, data=data)
Example 2: _do_subject_slice_timing
def _do_subject_slice_timing(subject_data, ref_slice=0,
                             slice_order="ascending", interleaved=False,
                             caching=True, write_output_images=2,
                             func_prefix=None, func_basenames=None,
                             ext=None):
    if func_prefix is None:
        func_prefix = PREPROC_OUTPUT_IMAGE_PREFICES['STC']
    if func_basenames is None:
        func_basenames = [get_basenames(func)
                          for func in subject_data.func]

    # prepare for smart caching
    if caching:
        mem = Memory(cachedir=os.path.join(
            subject_data.output_dir, 'cache_dir'), verbose=100)
    runner = lambda handle: mem.cache(handle) if caching else handle

    stc_output = []
    original_bold = subject_data.func
    for sess_func, sess_id in zip(subject_data.func,
                                  range(subject_data.n_sessions)):
        fmristc = runner(fMRISTC(slice_order=slice_order, ref_slice=ref_slice,
                                 interleaved=interleaved, verbose=True).fit)(
                                     raw_data=sess_func)
        stc_output.append(runner(fmristc.transform)(
            sess_func,
            output_dir=subject_data.tmp_output_dir if (
                write_output_images > 0) else None,
            basenames=func_basenames[sess_id],
            prefix=func_prefix, ext=ext))
    subject_data.func = stc_output
    del original_bold, fmristc
    if write_output_images > 1:
        subject_data.hardlink_output_files()
    return subject_data
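The runner lambda in the example above is a handy pattern: it wraps a callable with mem.cache only when caching is requested, so call sites look identical either way. A standalone, hedged sketch of the same idea (make_runner and expensive_step are made-up names):

import os
import tempfile
from sklearn.externals.joblib import Memory

def make_runner(caching, cache_root):
    # Memoize callables on disk only when caching is enabled;
    # otherwise return the callable unchanged.
    if caching:
        mem = Memory(cachedir=os.path.join(cache_root, 'cache_dir'),
                     verbose=0)
        return lambda handle: mem.cache(handle)
    return lambda handle: handle

def expensive_step(x):
    return x * 2

runner = make_runner(caching=True, cache_root=tempfile.mkdtemp())
result = runner(expensive_step)(21)  # same call syntax either way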
Example 3: _delete_orientation
def _delete_orientation(self):
    """
    Delete orientation metadata. Garbage orientation metadata can lead to
    severe mis-registration trouble.
    """
    # prepare for smart caching
    if self.scratch is None:
        self.scratch = self.output_dir
    cache_dir = os.path.join(self.scratch, 'cache_dir')
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    mem = Memory(cachedir=cache_dir, verbose=5)

    # deleteorient for func
    for attr in ['n_sessions', 'session_output_dirs']:
        if getattr(self, attr) is None:
            warnings.warn("'%s' attribute is None! Skipping." % attr)
            break
    else:
        self.func = [mem.cache(delete_orientation)(
            self.func[sess], self.session_output_dirs[sess])
            for sess in range(self.n_sessions)]

    # deleteorient for anat
    if self.anat is not None:
        self.anat = mem.cache(delete_orientation)(
            self.anat, self.anat_output_dir)
Example 4: fit
def fit(self, X, y=None):
    """
    Compute agglomerative clustering.

    Parameters
    ----------
    X : array-like, shape=(n_samples, n_features)

    Returns
    -------
    self
    """
    memory = self.memory
    if isinstance(memory, six.string_types):
        memory = Memory(cachedir=memory, verbose=0)

    if self.n_landmarks is None:
        distances = memory.cache(pdist)(X, self.metric)
    else:
        if self.landmark_strategy == 'random':
            land_indices = check_random_state(self.random_state).randint(
                len(X), size=self.n_landmarks)
        else:
            land_indices = np.arange(len(X))[::(
                len(X) // self.n_landmarks)][:self.n_landmarks]
        distances = memory.cache(pdist)(X[land_indices], self.metric)

    tree = memory.cache(linkage)(distances, method=self.linkage)
    self.landmark_labels_ = fcluster(tree, criterion='maxclust',
                                     t=self.n_clusters) - 1

    if self.n_landmarks is None:
        self.landmarks_ = X
    else:
        self.landmarks_ = X[land_indices]
    return self
Example 5: fetch_asirra
def fetch_asirra(image_count=1000):
    partial_path = check_fetch_asirra()
    m = Memory(cachedir=partial_path, compress=6, verbose=0)
    load_func = m.cache(_fetch_asirra)
    images, target = load_func(partial_path, image_count=image_count)
    return Bunch(data=images.reshape(len(images), -1),
                 images=images, target=target,
                 DESCR="Asirra cats and dogs dataset")
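Example 5 passes compress=6 to Memory: cached results are written compressed at level 6, trading CPU time for disk space, which suits large image arrays. A small, hedged sketch of the same option (load_images is a made-up stand-in for _fetch_asirra):

import tempfile
import numpy as np
from sklearn.externals.joblib import Memory

mem = Memory(cachedir=tempfile.mkdtemp(), compress=6, verbose=0)

@mem.cache
def load_images(n):
    # stand-in for an expensive download / decode step
    return np.zeros((n, 64, 64), dtype=np.uint8)

load_images(100)  # first call: computed, compressed, written to disk
load_images(100)  # second call: decompressed and read back from the cache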
Example 6: _cache
def _cache(self, func, memory_level=1, **kwargs):
    """ Return a joblib.Memory object if necessary.

    The memory_level determines the level above which the wrapped
    function output is cached. By specifying a numeric value for
    this level, the user can control the amount of cache memory
    used. This function will cache the function call or not
    depending on the cache level.

    Parameters
    ----------
    func: python function
        The function whose output is to be cached.
    memory_level: integer
        The memory_level from which caching must be enabled for the
        wrapped function.

    Returns
    -------
    Either the original function, if there is no need to cache it (because
    the requested level is lower than the value given to _cache()), or a
    joblib.Memory object that wraps the function func.
    """
    # Create attributes if they don't exist.
    # This is to make creating them in __init__() optional.
    if not hasattr(self, "memory_level"):
        self.memory_level = 0
    if not hasattr(self, "memory"):
        self.memory = Memory(cachedir=None)

    # If cache level is 0 but a memory object has been provided, set
    # memory_level to 1 with a warning.
    if self.memory_level == 0:
        if (isinstance(self.memory, str)
                or self.memory.cachedir is not None):
            warnings.warn("memory_level is currently set to 0 but "
                          "a Memory object has been provided. "
                          "Setting memory_level to 1.")
            self.memory_level = 1

    if self.memory_level < memory_level:
        mem = Memory(cachedir=None)
        return mem.cache(func, **kwargs)
    else:
        memory = self.memory
        if isinstance(memory, str):
            memory = Memory(cachedir=memory)
        if not isinstance(memory, Memory):
            raise TypeError("'memory' argument must be a string or a "
                            "joblib.Memory object.")
        if memory.cachedir is None:
            warnings.warn("Caching has been enabled (memory_level = %d) but "
                          "no Memory object or path has been provided "
                          "(parameter memory). Caching deactivated for "
                          "function %s." % (self.memory_level, func.__name__))
        return memory.cache(func, **kwargs)
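Examples 6, 8 and 13 all implement the same two-level scheme: wrap the function with a real on-disk Memory only when the object's configured memory_level reaches the level requested at the call site; otherwise fall back to Memory(cachedir=None), which calls straight through. A condensed, hedged sketch of that gate (CachedEstimator and total are made-up names):

import tempfile
from sklearn.externals.joblib import Memory

class CachedEstimator(object):
    def __init__(self, memory=None, memory_level=1):
        self.memory = memory          # None, a path, or a Memory instance
        self.memory_level = memory_level

    def _cache(self, func, memory_level=1):
        # Gate: cache only if our level reaches the call site's level.
        if self.memory_level < memory_level or self.memory is None:
            return Memory(cachedir=None).cache(func)  # transparent no-op
        memory = self.memory
        if isinstance(memory, str):
            memory = Memory(cachedir=memory, verbose=0)
        return memory.cache(func)

def total(values):
    return sum(values)

est = CachedEstimator(memory=tempfile.mkdtemp(), memory_level=2)
cached_total = est._cache(total, memory_level=1)  # 2 >= 1, so cached on disk
print(cached_total((1, 2, 3)))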
Example 7: get_all_metadata
def get_all_metadata(config=None, args=None):
    if config is None and args is None:
        raise Exception('Either config or args need to be not None')
    if config is None:
        config = get_config(args)

    class_meta = read_class_meta(config.dataset.class_meta_file)
    attrib_meta_with_name = read_attribute_meta(config.dataset.attrib_meta_file)
    attrib_meta = attrib_meta_with_name.drop('class_name', axis=1)
    train_annos = read_image_annotations(config.dataset.train_annos_file)
    test_annos = read_image_annotations(config.dataset.test_annos_file,
                                        has_class_id=False)
    domain_meta = read_domain_meta(config.dataset.domain_meta_file)
    train_annos['class_name'] = np.array(
        [class_meta.class_name[class_index]
         for class_index in train_annos.class_index])
    # test_annos['class_name'] = np.array(
    #     [class_meta.class_name[class_index]
    #      for class_index in test_annos.class_index])

    # Prepend the dataset path to each img_path
    train_annos.img_path = train_annos.img_path.apply(
        lambda x: config.dataset.main_path.joinpath(x).abspath())
    test_annos.img_path = test_annos.img_path.apply(
        lambda x: config.dataset.main_path.joinpath(x).abspath())

    # Filter the class meta and train/test annotations to just use the
    # domains defined in config
    class_meta = class_meta[class_meta.domain_index.isin(config.dataset.domains)]
    train_annos = train_annos[train_annos.domain_index.isin(config.dataset.domains)]
    test_annos = test_annos[test_annos.domain_index.isin(config.dataset.domains)]

    # Create dev set
    dev_annos_train, dev_annos_test = create_dev_set(train_annos, config)

    # Should we use the dev set as the test set?
    if config.dataset.dev_set.use:
        train_used, test_used = dev_annos_train, dev_annos_test
    else:
        train_used, test_used = train_annos, test_annos

    if config.flip_images:
        memory = Memory(cachedir=config.cache_dir,
                        verbose=config.logging.verbose)
        flip_func = memory.cache(create_flipped_images)
        train_used = flip_func(train_used, config)

    return ({'real_train_annos': train_annos,
             'real_test_annos': test_annos,
             'train_annos': train_used,
             'test_annos': test_used,
             'validation_annos': dev_annos_test,
             'class_meta': class_meta,
             'domain_meta': domain_meta,
             'attrib_meta': attrib_meta,
             'attrib_meta_with_name': attrib_meta_with_name},
            config)
Example 8: cache
def cache(func, memory, ref_memory_level=2, memory_level=1, **kwargs):
    """ Return a joblib.Memory object.

    The memory_level determines the level above which the wrapped
    function output is cached. By specifying a numeric value for
    this level, the user can control the amount of cache memory
    used. This function will cache the function call or not
    depending on the cache level.

    Parameters
    ----------
    func: function
        The function whose output is to be cached.
    memory: instance of joblib.Memory or string
        Used to cache the function call.
    ref_memory_level: int
        The reference memory_level used to determine if the function call
        must be cached or not (if memory_level is larger than
        ref_memory_level the function is cached).
    memory_level: int
        The memory_level from which caching must be enabled for the
        wrapped function.
    kwargs: keyword arguments
        The keyword arguments passed to memory.cache

    Returns
    -------
    mem: joblib.MemorizedFunc
        Object that wraps the function func. This object may be
        a no-op if the requested level is lower than the value given
        to _cache(). For consistency, a joblib.Memory object is always
        returned.
    """
    if ref_memory_level <= memory_level or memory is None:
        memory = Memory(cachedir=None)

    if isinstance(memory, str):
        memory = Memory(cachedir=memory)
    if not isinstance(memory, memory_classes):
        raise TypeError("'memory' argument must be a string or a "
                        "joblib.Memory object. "
                        "%s %s was given." % (memory, type(memory)))

    if memory.cachedir is None:
        warnings.warn("Caching has been enabled (memory_level = %d) "
                      "but no Memory object or path has been provided"
                      " (parameter memory). Caching deactivated for "
                      "function %s." % (ref_memory_level, func.__name__),
                      stacklevel=2)
    return memory.cache(func, **kwargs)
Example 9: _do_subject_coregister
def _do_subject_coregister(
        subject_data, coreg_func_to_anat=True, caching=True,
        ext=None, write_output_images=2, func_basenames=None, func_prefix="",
        anat_basename=None, anat_prefix="", report=True, verbose=True):
    ref_brain = 'func'
    src_brain = 'anat'
    ref = subject_data.func[0]
    src = subject_data.anat
    if coreg_func_to_anat:
        ref_brain, src_brain = src_brain, ref_brain
        ref, src = src, ref

    # prepare for smart caching
    if caching:
        mem = Memory(cachedir=os.path.join(
            subject_data.output_dir, 'cache_dir'), verbose=100)
    runner = lambda handle: mem.cache(handle) if caching else handle

    # estimate realignment (affine) params for coreg
    coreg = runner(Coregister(verbose=verbose).fit)(ref, src)

    # apply coreg
    if coreg_func_to_anat:
        if func_basenames is None:
            func_basenames = [get_basenames(func)
                              for func in subject_data.func]
        coreg_func = []
        for sess_func, sess_id in zip(subject_data.func, range(
                subject_data.n_sessions)):
            coreg_func.append(runner(coreg.transform)(
                sess_func, output_dir=subject_data.tmp_output_dir if (
                    write_output_images == 2) else None,
                basenames=func_basenames[sess_id] if coreg_func_to_anat
                else anat_basename, prefix=func_prefix))
        subject_data.func = coreg_func
        src = load_vols(subject_data.func[0])[0]
    else:
        if anat_basename is None:
            anat_basename = get_basenames(subject_data.anat)
        subject_data.anat = runner(coreg.transform)(
            subject_data.anat, basename=anat_basename,
            output_dir=subject_data.tmp_output_dir if (
                write_output_images == 2) else None, prefix=anat_prefix,
            ext=ext)
        src = subject_data.anat

    # generate coregistration QA thumbs
    if report:
        subject_data.generate_coregistration_thumbnails(
            coreg_func_to_anat=coreg_func_to_anat, nipype=False)
    del coreg
    if write_output_images > 1:
        subject_data.hardlink_output_files()
    return subject_data
Example 10: get_multilabel
def get_multilabel(self):
    cache = Memory(cachedir=tempfile.gettempdir())
    cached_func = cache.cache(make_multilabel_classification)
    return cached_func(
        n_samples=100,
        n_features=10,
        n_classes=5,
        n_labels=5,
        return_indicator=True,
        random_state=1
    )
Example 11: fit
def fit(self, niimgs, y=None):
    """Compute the mask corresponding to the data.

    Parameters
    ----------
    niimgs: list of filenames or NiImages
        Data on which the mask must be calculated. If this is a list,
        the affine is considered the same for all.
    """
    memory = self.memory
    if isinstance(memory, str):
        memory = Memory(cachedir=memory)

    # Load data (if filenames are given, load them)
    if self.verbose > 0:
        print("[%s.fit] Loading data from %s" % (
            self.__class__.__name__,
            utils._repr_niimgs(niimgs)[:200]))
    data = []
    for niimg in niimgs:
        # Note that data is not loaded into memory at this stage
        # if niimg is a string.
        data.append(utils.check_niimgs(niimg, accept_3d=True))

    # Compute the mask if not given by the user
    if self.mask is None:
        if self.verbose > 0:
            print("[%s.fit] Computing the mask" % self.__class__.__name__)
        mask = memory.cache(masking.compute_multi_epi_mask,
                            ignore=['verbose'])(
            niimgs,
            connected=self.mask_connected,
            opening=self.mask_opening,
            lower_cutoff=self.mask_lower_cutoff,
            upper_cutoff=self.mask_upper_cutoff,
            n_jobs=self.n_jobs,
            verbose=(self.verbose - 1))
        self.mask_img_ = Nifti1Image(mask.astype(int), data[0].get_affine())
    else:
        self.mask_img_ = utils.check_niimg(self.mask)

    # If resampling is requested, resample the mask as well.
    # Resampling allows the user to change the affine, the shape, or both.
    if self.verbose > 0:
        print("[%s.fit] Resampling mask" % self.__class__.__name__)
    self.mask_img_ = memory.cache(resampling.resample_img)(
        self.mask_img_,
        target_affine=self.target_affine,
        target_shape=self.target_shape,
        copy=(self.target_affine is not None and
              self.target_shape is not None))
    return self
Example 12: _check_memory
def _check_memory(memory, verbose=0):
    """Function to ensure an instance of a joblib.Memory object.

    Parameters
    ----------
    memory: None, instance of joblib.Memory or str
        Used to cache the masking process.
        If a str is given, it is the path to the caching directory.
    verbose : int, optional (default 0)
        Verbosity level.

    Returns
    -------
    instance of joblib.Memory.
    """
    if memory is None:
        memory = Memory(cachedir=None, verbose=verbose)
    if isinstance(memory, _basestring):
        cache_dir = memory
        if nilearn.EXPAND_PATH_WILDCARDS:
            cache_dir = os.path.expanduser(cache_dir)

        # Perform some verifications on the given path.
        split_cache_dir = os.path.split(cache_dir)
        if (len(split_cache_dir) > 1 and
                (not os.path.exists(split_cache_dir[0]) and
                 split_cache_dir[0] != '')):
            if (not nilearn.EXPAND_PATH_WILDCARDS and
                    cache_dir.startswith("~")):
                # Maybe the user wants to enable expanded user paths.
                error_msg = ("Given cache path parent directory doesn't "
                             "exist, you gave '{0}'. Enabling "
                             "nilearn.EXPAND_PATH_WILDCARDS could solve "
                             "this issue.".format(split_cache_dir[0]))
            elif memory.startswith("~"):
                # Path built on top of expanded user path doesn't exist.
                error_msg = ("Given cache path parent directory doesn't "
                             "exist, you gave '{0}' which was expanded "
                             "as '{1}', but that doesn't exist either. "
                             "Use nilearn.EXPAND_PATH_WILDCARDS to "
                             "deactivate the automatic expansion of the "
                             "user path (~)."
                             .format(split_cache_dir[0],
                                     os.path.dirname(memory)))
            else:
                # The given cache base path doesn't exist.
                error_msg = ("Given cache path parent directory doesn't "
                             "exist, you gave '{0}'."
                             .format(split_cache_dir[0]))
            raise ValueError(error_msg)

        memory = Memory(cachedir=cache_dir, verbose=verbose)
    return memory
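Stripped of nilearn's wildcard-expansion handling, the normalization that _check_memory performs reduces to a three-way dispatch on None, path string, or Memory instance. A hedged, simplified equivalent (this check_memory is a made-up name for illustration, not nilearn's API):

import os
from sklearn.externals.joblib import Memory

def check_memory(memory, verbose=0):
    # Accept None, a cache-directory path, or a ready Memory instance.
    if memory is None:
        return Memory(cachedir=None, verbose=verbose)
    if isinstance(memory, str):
        parent = os.path.dirname(os.path.abspath(memory))
        if not os.path.exists(parent):
            raise ValueError("cache path parent directory %r does not exist"
                             % parent)
        return Memory(cachedir=memory, verbose=verbose)
    return memory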
Example 13: cache
def cache(self, func, func_memory_level, **kwargs):
    """ Return a joblib.Memory object if necessary (depends on memory_level).

    The memory_level is a rough estimate of the amount of memory necessary
    to cache a function call. By specifying a numeric value for this level,
    the user can control how much memory is used on their computer.
    This function will cache the function call or not depending on the
    memory level. It is a helper to avoid code duplication.

    Parameters
    ----------
    self: python object
        The object containing information about caching. It must have a
        memory attribute (used if caching is necessary) and an integer
        memory_level attribute to determine if the function must be cached
        or not.
    func: python function
        The function that may be cached.
    func_memory_level: integer
        The memory_level from which caching must be enabled.

    Returns
    -------
    Either the original function (if there is no need to cache it) or a
    joblib.Memory object that will be used to cache the function call.
    """
    # If memory_level is 0 but a memory object is provided, set
    # memory_level to 1 with a warning.
    if self.memory_level == 0:
        if hasattr(self, 'memory') and self.memory is not None \
                and (isinstance(self.memory, str)
                     or self.memory.cachedir is not None):
            warnings.warn("memory_level is set to 0 but a Memory object has"
                          " been provided. Setting memory_level to 1.")
            self.memory_level = 1

    if self.memory_level < func_memory_level:
        return func
    else:
        memory = self.memory
        if isinstance(memory, str):
            memory = Memory(cachedir=memory)
        if memory.cachedir is None:
            warnings.warn("Caching has been enabled (memory_level = %d) but "
                          "no Memory object or path has been provided "
                          "(parameter memory). Caching canceled for function "
                          "%s." % (self.memory_level, func.__name__))
        return memory.cache(func, **kwargs)
Example 14: _fit
def _fit(self, X, y=None, **fit_params):
    self._validate_steps()

    # Set up the memory
    memory = self.memory
    if memory is None:
        memory = Memory(cachedir=None, verbose=0)
    elif isinstance(memory, six.string_types):
        memory = Memory(cachedir=memory, verbose=0)
    elif not isinstance(memory, Memory):
        raise ValueError("'memory' should either be a string or"
                         " a joblib.Memory instance, got"
                         " 'memory={!r}' instead.".format(memory))

    fit_transform_one_cached = memory.cache(_fit_transform_one)
    fit_sample_one_cached = memory.cache(_fit_sample_one)

    fit_params_steps = dict((name, {}) for name, step in self.steps
                            if step is not None)
    for pname, pval in six.iteritems(fit_params):
        step, param = pname.split('__', 1)
        fit_params_steps[step][param] = pval

    Xt = X
    yt = y
    for step_idx, (name, transformer) in enumerate(self.steps[:-1]):
        if transformer is None:
            pass
        else:
            if memory.cachedir is None:
                # we do not clone when caching is disabled to preserve
                # backward compatibility
                cloned_transformer = transformer
            else:
                cloned_transformer = clone(transformer)
            # Fit or load from cache the current transformer
            if (hasattr(cloned_transformer, "transform") or
                    hasattr(cloned_transformer, "fit_transform")):
                Xt, fitted_transformer = fit_transform_one_cached(
                    cloned_transformer, None, Xt, yt,
                    **fit_params_steps[name])
            elif hasattr(cloned_transformer, "sample"):
                Xt, yt, fitted_transformer = fit_sample_one_cached(
                    cloned_transformer, Xt, yt,
                    **fit_params_steps[name])
            # Replace the transformer of the step with the fitted
            # transformer. This is necessary when loading the transformer
            # from the cache.
            self.steps[step_idx] = (name, fitted_transformer)

    if self._final_estimator is None:
        return Xt, yt, {}
    return Xt, yt, fit_params_steps[self.steps[-1][0]]
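Example 14's transformer caching survives in current scikit-learn as the memory argument of Pipeline, which caches fitted transformers between fit calls. A short sketch against the modern public API (recent scikit-learn/joblib take a cache path directly here):

import tempfile
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler

X, y = make_classification(n_samples=200, random_state=0)

# Fitted transformers are cached under `memory`; refitting the pipeline
# with unchanged steps reloads them from disk instead of recomputing.
pipe = Pipeline([('scale', StandardScaler()),
                 ('clf', LogisticRegression())],
                memory=tempfile.mkdtemp())
pipe.fit(X, y)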
Example 15: _niigz2nii
def _niigz2nii(self):
    """
    Convert .nii.gz to .nii (crucial for SPM).
    """
    cache_dir = os.path.join(self.scratch, 'cache_dir')
    mem = Memory(cache_dir, verbose=100)
    self._sanitize_session_output_dirs()
    if None not in [self.func, self.n_sessions, self.session_output_dirs]:
        self.func = [mem.cache(do_niigz2nii)(
            self.func[sess], output_dir=self.session_output_dirs[sess])
            for sess in range(self.n_sessions)]
    if self.anat is not None:
        self.anat = mem.cache(do_niigz2nii)(
            self.anat, output_dir=self.anat_output_dir)