This article collects typical usage examples of the Python method sklearn.covariance.GraphLassoCV.fit. If you are wondering what exactly GraphLassoCV.fit does, how to use it, or where to find examples of it, the hand-picked code samples below may help. You can also explore further usage examples of the containing class, sklearn.covariance.GraphLassoCV.
A total of 15 code examples of GraphLassoCV.fit are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
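Before turning to the examples, here is a minimal, hypothetical sketch of the basic fit pattern on synthetic data (not taken from any of the projects below; note that scikit-learn 0.20+ renames the estimator to GraphicalLassoCV):
# Minimal sketch on synthetic data (assumes an older scikit-learn that still ships GraphLassoCV).
import numpy as np
from sklearn.covariance import GraphLassoCV

rng = np.random.RandomState(0)
X = rng.randn(60, 10)            # 60 samples, 10 variables
estimator = GraphLassoCV()       # cross-validates the regularization strength alpha
estimator.fit(X)                 # rows are samples, columns are variables

print(estimator.alpha_)          # selected penalty
print(estimator.covariance_)     # estimated covariance matrix
print(estimator.precision_)      # sparse inverse covariance (precision) matrix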
Example 1: calculate_connectivity_matrix
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
def calculate_connectivity_matrix(in_data, extraction_method):
    '''
    After extract_parcellation_time_series(), connectivity matrices are calculated
    via the specified extraction method.
    Returns a dict holding the matrix and also saves it to matrix_file.
    '''
    # fixme implement sparse inv covar
    import os, pickle
    import numpy as np

    if extraction_method == 'correlation':
        correlation_matrix = np.corrcoef(in_data.T)
        matrix = {'correlation': correlation_matrix}
    elif extraction_method == 'sparse_inverse_covariance':
        # Compute the sparse inverse covariance
        from sklearn.covariance import GraphLassoCV
        estimator = GraphLassoCV()
        estimator.fit(in_data)
        matrix = {'covariance': estimator.covariance_,
                  'sparse_inverse_covariance': estimator.precision_}
    else:
        raise Exception('Unknown extraction method: %s' % extraction_method)

    matrix_file = os.path.join(os.getcwd(), 'matrix.pkl')
    with open(matrix_file, 'wb') as f:  # binary mode for pickle
        pickle.dump(matrix, f)

    return matrix, matrix_file
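A hypothetical call of this helper, with randomly generated time series standing in for real parcellation data (the samples-by-regions input shape is an assumption):
# Hypothetical usage of Example 1; in_data is synthetic (time points x parcels).
import numpy as np

in_data = np.random.randn(120, 10)
matrix, matrix_file = calculate_connectivity_matrix(in_data, 'correlation')
print(matrix['correlation'].shape)                  # (10, 10)
matrix, matrix_file = calculate_connectivity_matrix(in_data, 'sparse_inverse_covariance')
print(matrix['sparse_inverse_covariance'].shape)    # (10, 10)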
Example 2: run_clustering
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
def run_clustering(methods, cases):
    # imports used below (not shown in the original excerpt)
    import numpy as np
    from sklearn.cluster import SpectralClustering
    from sklearn.metrics import adjusted_rand_score

    true_method_groups = [m[1] for m in methods]
    edge_model = GraphLassoCV(alphas=4, n_refinements=5, n_jobs=3, max_iter=100)
    edge_model.fit(cases)
    CV = edge_model.covariance_

    num_clusters = 3
    spectral = SpectralClustering(n_clusters=num_clusters, affinity='precomputed')
    spectral.fit(np.asarray(CV))
    spec_sort = np.argsort(spectral.labels_)

    for i, m in enumerate(methods):
        print("%s:%d\t%s" % (m[1], spectral.labels_[i], m[0]))
    print("Adj. Rand Score: %f" % adjusted_rand_score(spectral.labels_, true_method_groups))
Example 3: mgsparse
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
def mgsparse(matrix, dimred=0, cutoff=1, eigtype='b'):
    '''Plot the ROC curve for a multivariate Gaussian classifier using sparse covariance and precision matrices.
    Input:
        matrix  = n-by-m pandas data frame; each row is one bacterial strain, each column is one subject
        dimred  = reduce the dimensionality of the covariance and its inverse to n < len(x) if n is specified;
                  otherwise no reduction in dimensions
        cutoff  = cutoff for the top eigenvalues if specified; may be less than n
        eigtype = pick n random (r), biggest (b) or smallest (s) eigenvalues to construct matrix B; default is biggest (b)
    Output:
        auc = area under the ROC curve
    '''
    # convert matrix from pandas data frame to array
    m = matrix.values
    # control and CD subjects
    con = m.T[252:]
    cd = m.T[:252]
    # get mean for each strain
    conmean = vmean(con.T)
    cdmean = vmean(cd.T)
    # sparse covariance and precision matrix for control
    conglasso = GraphLassoCV()
    conglasso.fit(con)
    concov = conglasso.covariance_
    concovinv = conglasso.precision_
    # sparse covariance and precision matrix for CD
    cdglasso = GraphLassoCV()
    cdglasso.fit(cd)
    cdcov = cdglasso.covariance_
    cdcovinv = cdglasso.precision_
    # vmean, ndgaussianfitsparse and ndaucsklearn are helpers defined elsewhere in Microbiome.py.
    # The original listing referenced names such as c24g/sc24gcov here; they are assumed to
    # correspond to the local variables computed above.
    listac = ndgaussianfitsparse(con, conmean, concov, concovinv, dimred=dimred)
    listbc = ndgaussianfitsparse(con, cdmean, cdcov, cdcovinv, dimred=dimred)
    listacd = ndgaussianfitsparse(cd, conmean, concov, concovinv, dimred=dimred)
    listbcd = ndgaussianfitsparse(cd, cdmean, cdcov, cdcovinv, dimred=dimred)
    auc = ndaucsklearn(listac, listbc, listacd, listbcd, 252, 172, tol=2)
    return auc
Developer: Santhosh97, Project: Classification-Tests-on-Gut-Microbiome-Composition-Data, Lines: 50, Source file: Microbiome.py
Example 4: computePartialCorrelationsCV
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
def computePartialCorrelationsCV(coupling_data):
    import numpy as np  # module-level import in the original source

    # standardize
    coupling_data -= coupling_data.mean(axis=0)
    coupling_data /= coupling_data.std(axis=0)

    estimator = GraphLassoCV(alphas=10)
    estimator.fit(coupling_data)
    prec = estimator.get_precision()
    reg_alpha = estimator.alpha_

    # partial correlations: rho_ij = -p_ij / sqrt(p_ii * p_jj)
    # diagonal of the precision matrix
    prec_diag = np.diag(prec)
    partial_correlations = -prec / np.sqrt(np.outer(prec_diag, prec_diag))
    # set the lower half to zero (hard-coded for 400 variables)
    partial_correlations[np.tril_indices(400)] = 0

    return estimator.get_precision(), partial_correlations, reg_alpha
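As a quick numeric check of the partial-correlation formula used above, a small sketch with a hypothetical precision matrix (not derived from the coupling data):
# rho_ij = -p_ij / sqrt(p_ii * p_jj), applied to a made-up 3x3 precision matrix
import numpy as np

prec = np.array([[ 2.0, -0.8,  0.0],
                 [-0.8,  1.5, -0.3],
                 [ 0.0, -0.3,  1.0]])
d = np.sqrt(np.diag(prec))
partial_correlations = -prec / np.outer(d, d)
np.fill_diagonal(partial_correlations, 1.0)   # by convention the diagonal is 1
print(partial_correlations)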
Example 5: main
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
# module-level imports restored for this excerpt; the cross_validation module is an
# assumption that matches the old KFold(n, n_folds) call signature used below
import numpy
from sklearn.cross_validation import KFold

def main():
    sample, genes, raw_expression, cov = load_data()
    expression = raw_expression[raw_expression.min(1) > 100]
    expression_indices = numpy.nonzero(raw_expression.sum(1) > 6)[0].tolist()
    ## reorder and filter data
    #rep1_cols = numpy.array((3,0,5)) # 8 is co culture
    #rep2_cols = numpy.array((4,2,7)) # 9 is MRC5
    expression = expression[:, (3, 4, 0, 2, 5, 7)]
    # log data
    expression = numpy.log10(expression + 1)[1:100, ]
    cov = expression.dot(expression.T)
    print(cov.shape)
    #mo = GraphLasso(alpha=95, mode='lars', verbose=True) #, cv=KFold(3,2), n_jobs=24)
    mo = GraphLassoCV(mode='lars', verbose=True, cv=KFold(3, 2), n_jobs=24)
    # fit() returns the estimator itself, so take the fitted sparse estimate explicitly
    sparse_cov = mo.fit(cov).covariance_
    print(numpy.nonzero(sparse_cov)[0].sum())
    return
Example 6: GroupSparseCovarianceCV
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
# imports used below, not shown in the original excerpt
# (the GroupSparseCovarianceCV import path assumes a recent nilearn)
import matplotlib.pyplot as plt
from nilearn.connectome import GroupSparseCovarianceCV

gsc = GroupSparseCovarianceCV(max_iter=50, verbose=1)
gsc.fit(subjects)

for n in range(n_displayed):
    plt.subplot(n_displayed, 4, 4 * n + 2)
    plot_matrix(gsc.precisions_[..., n])
    if n == 0:
        plt.title("group-sparse\n$\\alpha=%.2f$" % gsc.alpha_)

# Fit one graph lasso per subject
from sklearn.covariance import GraphLassoCV
gl = GraphLassoCV(verbose=1)

for n, subject in enumerate(subjects[:n_displayed]):
    gl.fit(subject)
    plt.subplot(n_displayed, 4, 4 * n + 3)
    plot_matrix(gl.precision_)
    if n == 0:
        plt.title("graph lasso")
    plt.ylabel("$\\alpha=%.2f$" % gl.alpha_)

# Fit one graph lasso for all subjects at once
import numpy as np
gl.fit(np.concatenate(subjects))
plt.subplot(n_displayed, 4, 4)
plot_matrix(gl.precision_)
plt.title("graph lasso, all subjects\n$\\alpha=%.2f$" % gl.alpha_)
Example 7: GraphLassoCV
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
    mask_img=mask_file, maps_img=icas_path, resampling_target='mask',
    standardize=True, detrend=True)
nmm.fit()
nmm.maps_img_.to_filename('dbg_ica_maps.nii.gz')

FS_netproj = nmm.transform(all_sub_rs_maps)
np.save('%i_nets_timeseries' % sub_id, FS_netproj)

# compute network sparse inverse covariance
from sklearn.covariance import GraphLassoCV
from nilearn.image import index_img
from nilearn import plotting

try:
    gsc_nets = GraphLassoCV(verbose=2, alphas=20)
    gsc_nets.fit(FS_netproj)
    np.save('%i_nets_cov' % sub_id, gsc_nets.covariance_)
    np.save('%i_nets_prec' % sub_id, gsc_nets.precision_)
except:
    pass

###############################################################################
# dump region poolings
###############################################################################
from nilearn.image import resample_img

crad = ds.fetch_atlas_craddock_2012()
# atlas_nii = index_img(crad['scorr_mean'], 19)  # Craddock 200 region atlas
atlas_nii = index_img(crad['scorr_mean'], 9)  # Craddock 100 region atlas
Example 8: scale
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 12 10:16:16 2016

@author: jonyoung
"""

import connectivity_utils as utils
import numpy as np
import scipy.linalg as la
from sklearn.covariance import GraphLassoCV, ledoit_wolf, GraphLasso
from sklearn.preprocessing import scale

connectivity_data = utils.load_hcp_matrix('/home/jonyoung/IoP_data/Data/HCP_PTN820/node_timeseries/3T_HCP820_MSMAll_d15_ts2/715950.txt')
print(connectivity_data)
print(np.shape(connectivity_data))
print(np.std(connectivity_data, axis=1))

connectivity_data = connectivity_data[:, :250]
X = scale(connectivity_data, axis=1)
model = GraphLassoCV(max_iter=1500, assume_centered=True)
model.fit(np.transpose(X))
Example 9: print
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
# ---------------------
#
# It all starts with the estimation of the signals' **covariance** matrix. Here the
# number of ROIs exceeds the number of samples,
print("time series has {0} samples".format(timeseries.shape[0]))
###############################################################################
# a situation in which the graphical lasso **sparse inverse covariance**
# estimator captures the covariance **structure** well.
from sklearn.covariance import GraphLassoCV
covariance_estimator = GraphLassoCV(verbose=1)
###############################################################################
# We simply fit our region signals into the `GraphLassoCV` object
covariance_estimator.fit(timeseries)
###############################################################################
# and get the ROI-to-ROI covariance matrix.
matrix = covariance_estimator.covariance_
print("Covariance matrix has shape {0}.".format(matrix.shape))
###############################################################################
# Plot matrix and graph
# ---------------------
#
# We use `matplotlib` plotting functions to visualize our correlation matrix
# and display the graph of connections with `nilearn.plotting.plot_connectome`.
import matplotlib.pyplot as plt
from nilearn import plotting
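To make the "more ROIs than samples" point above concrete, here is a small, hypothetical illustration (sizes made up): in that regime the empirical covariance is rank-deficient, which is exactly where a regularized estimator such as GraphLassoCV is preferable.
# p > n toy check: the empirical covariance cannot have full rank
import numpy as np

rng = np.random.RandomState(0)
n_samples, n_features = 30, 50            # hypothetical sizes, n < p
X = rng.randn(n_samples, n_features)
emp_cov = np.cov(X, rowvar=False)         # 50 x 50
print(np.linalg.matrix_rank(emp_cov))     # at most n_samples - 1, hence singular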
Example 10: NiftiMapsMasker
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
# Extract time series
# --------------------
from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
                         memory='nilearn_cache', verbose=5)
time_series = masker.fit_transform(data.func[0],
                                   confounds=data.confounds)
##############################################################################
# Compute the sparse inverse covariance
# --------------------------------------
from sklearn.covariance import GraphLassoCV
estimator = GraphLassoCV()
estimator.fit(time_series)
##############################################################################
# Display the connectome matrix
# ------------------------------
from matplotlib import pyplot as plt
# Display the covariance
plt.figure(figsize=(10, 10))
# The covariance can be found at estimator.covariance_
plt.imshow(estimator.covariance_, interpolation="nearest",
           vmax=1, vmin=-1, cmap=plt.cm.RdBu_r)
# And display the labels
x_ticks = plt.xticks(range(len(labels)), labels, rotation=90)
y_ticks = plt.yticks(range(len(labels)), labels)
Example 11: datetime
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
# imports restored for this excerpt; quotes_historical_yahoo assumes the old
# matplotlib.finance module, and `symbols` is defined earlier in the source script
from datetime import datetime

import numpy as np
import pytz
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib.finance import quotes_historical_yahoo
from sklearn.covariance import GraphLassoCV

start = datetime(2014, 1, 1, 0, 0, 0, 0, pytz.utc)
end = datetime(2016, 1, 1, 0, 0, 0, 0, pytz.utc)
quotes = [quotes_historical_yahoo(symbol, start, end, asobject=True) for symbol in symbols]
qopen = np.array([q.open for q in quotes]).astype(np.float)
qclose = np.array([q.close for q in quotes]).astype(np.float)
variation = qclose - qopen  # per-day variation in price for each symbol
X = variation.T
X /= X.std(axis=0)  # standardize to use correlations rather than covariance

# estimate inverse covariance
graph = GraphLassoCV()
graph.fit(X)
gl_cov = graph.covariance_
gl_prec = graph.precision_
gl_alphas = graph.cv_alphas_
gl_scores = np.mean(graph.grid_scores, axis=1)

plt.figure()
sns.heatmap(gl_prec)
plt.figure()
plt.plot(gl_alphas, gl_scores, marker='o', color='b', lw=2.0, label='GraphLassoCV')
plt.title("Graph Lasso Alpha Selection")
plt.xlabel("alpha")
plt.ylabel("score")
plt.legend()
Example 12: GraphicLasso
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
def GraphicLasso(X):
    model = GraphLassoCV()
    model.fit(X)
    cov_ = model.covariance_   # estimated covariance (computed but not returned)
    prec_ = model.precision_
    return prec_
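A hypothetical call of this wrapper on random data (the shape is an assumption):
# Hypothetical usage of Example 12 with synthetic data
import numpy as np

X = np.random.randn(100, 5)      # 100 samples, 5 variables
prec = GraphicLasso(X)
print(prec.shape)                # (5, 5)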
Example 13: empirical_covariance
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
c0.append(temp_A[0])
c1.append(temp_A[1])
c2.append(temp_B)

data = np.array([c0, c1, c2])
data = data.transpose()
print(data)

# emp_cov = empirical_covariance(data, assume_centered=False)
# print(emp_cov)

model = GraphLassoCV()
model.fit(data)
cov_ = model.covariance_
prec_ = model.precision_

corr = np.corrcoef(data, rowvar=False)
print(corr)
# print(cov_)
# print(prec_)

threshold = 0.1
Example 14: get_EFA_HCA
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
EFA = True
survey_HCA = get_EFA_HCA(all_results['survey'], EFA)
survey_order = survey_HCA['reorder_vec']
task_HCA = get_EFA_HCA(all_results['task'], EFA)
task_order = task_HCA['reorder_vec']

all_data = pd.concat([all_results['task'].data.iloc[:, task_order],
                      all_results['survey'].data.iloc[:, survey_order]],
                     axis=1)
out, tuning = qgraph_cor(all_data, glasso=True, gamma=.5)

# recreate with sklearn just to check
data = scale(all_data)
clf = GraphLassoCV()
clf.fit(data)
sklearn_covariance = clf.covariance_[np.tril_indices_from(clf.covariance_)]
qgraph_covariance = out.values[np.tril_indices_from(out)]
method_correlation = np.corrcoef(sklearn_covariance, qgraph_covariance)[0, 1]
assert method_correlation > .99

def add_attributes(g):
    g.vs['measurement'] = ['task'] * len(task_order) + ['survey'] * len(survey_order)
    task_clusters = task_HCA['labels'][task_order]
    survey_clusters = survey_HCA['labels'][survey_order] + max(task_clusters)
    g.vs['cluster'] = np.append(task_clusters, survey_clusters)

save_loc = path.join(path.dirname(all_results['task'].get_output_dir()),
                     'graph_results')
makedirs(save_loc, exist_ok=True)
Example 15: GraphLassoCV
# Required import: from sklearn.covariance import GraphLassoCV [as alias]
# Or: from sklearn.covariance.GraphLassoCV import fit [as alias]
cov = linalg.inv(prec)
d = np.sqrt(np.diag(cov))
cov /= d
cov /= d[:, np.newaxis]
prec *= d
prec *= d[:, np.newaxis]
X = prng.multivariate_normal(np.zeros(n_features), cov, size=n_samples)
X -= X.mean(axis=0)
X /= X.std(axis=0)
##############################################################################
# Estimate the covariance
emp_cov = np.dot(X.T, X) / n_samples
model = GraphLassoCV()
model.fit(X)
cov_ = model.covariance_
prec_ = model.precision_
lw_cov_, _ = ledoit_wolf(X)
lw_prec_ = linalg.inv(lw_cov_)
##############################################################################
# Plot the results
pl.figure(figsize=(10, 6))
pl.subplots_adjust(left=0.02, right=0.98)
# plot the covariances
covs = [('Empirical', emp_cov), ('Ledoit-Wolf', lw_cov_),
        ('GraphLasso', cov_), ('True', cov)]
vmax = cov_.max()