This article collects typical usage examples of the Python method population.Population.sample. If you are unsure how to use Population.sample, or are looking for working examples, the curated code samples below may help. You can also explore further usage of the containing class, population.Population.
Six code examples of the Population.sample method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
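Before the full examples, here is a minimal sketch of the pattern every example below shares: build a model, wrap it in a Population, and draw a parameter sample from the prior. The make_model import path and the 'standard_glm' model name are taken from Examples 4 and 6; the exact arguments are assumptions, not a definitive API.

# Minimal sketch of the basic Population.sample pattern, assuming the
# model factory used in the examples below (models.model_factory).
from models.model_factory import make_model
from population import Population

model = make_model('standard_glm', N=2)   # model name as in Example 4 (assumed)
popn = Population(model)
x = popn.sample()                         # draw model parameters from the prior
print "Sampled parameter keys:", x.keys()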
Example 1: run_synth_test
# Required import: from population import Population [as alias]
# Or: from population.Population import sample [as alias]
# Also assumes: import os, cPickle, plus the project helpers parse_cmd_line_args,
# make_model, gen_synth_data, geweke_test, and plot_geweke_results.
def run_synth_test():
    """ Run a test with synthetic data and MCMC inference
    """
    # Parse command line args
    (options, args) = parse_cmd_line_args()

    print "Creating master population object"
    model = make_model(options.model, N=options.N)
    popn = Population(model)

    # Reuse cached results unless a recompute is forced
    results_file = os.path.join(options.resultsDir, "geweke_results.pkl")
    if os.path.exists(results_file) and not options.force_recompute:
        with open(results_file, "rb") as f:
            x_smpls = cPickle.load(f)
    else:
        # Sample ground-truth parameters from the prior and simulate data
        x0 = popn.sample()
        data = gen_synth_data(popn, x0, options.N, options.T)
        popn.set_data(data)

        # Perform inference
        N_samples = 1000
        x_smpls = geweke_test(popn, data, N_samples=N_samples)

        # Save results
        print "Saving results to %s" % results_file
        with open(results_file, "wb") as f:
            cPickle.dump(x_smpls, f, protocol=-1)

    # Plot empirical parameter distributions
    print "Plotting results."
    plot_geweke_results(popn, x_smpls, model, resdir=options.resultsDir)
Example 2: sample_network_from_prior
# Required import: from population import Population [as alias]
# Or: from population.Population import sample [as alias]
def sample_network_from_prior(model):
    """
    Sample a network from the prior
    """
    popn = Population(model)
    x_true = popn.sample()
    A_true = x_true['net']['graph']['A']   # adjacency matrix of the sampled network
    return popn, x_true, A_true
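A hedged usage sketch for this helper: the 'A' entry is the network adjacency matrix, per the indexing in the function body, and the model factory call is the assumed one from Example 4.

# Sketch: sample a network from the prior and inspect its adjacency matrix.
model = make_model('standard_glm', N=5)   # assumed model factory, as in Example 4
popn, x_true, A_true = sample_network_from_prior(model)
print "Adjacency matrix:\n", A_true       # i.e. x_true['net']['graph']['A']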
Example 3: PWLinear
# Required import: from population import Population [as alias]
# Or: from population.Population import sample [as alias]
# Also assumes: from numpy import linspace, exp, log, and that `vals` is a
# NumPy array (the constructor uses fancy indexing).
class PWLinear(object):
    """
    Use an array of (non-negative) function values on a grid to define a
    piecewise-linear pdf. Methods evaluate the pdf and draw samples from it.
    """

    # We'll ignore periods with marginal density below exp(smallexp)
    # times the max. exp(-21) = 7.6e-10 (~ 1/billion).
    smallexp = -21.

    def __init__(self, lo=None, hi=None, vals=None, absc=None,
                 step='lin', logvals=False, cut=1.e-9):
        """
        Define a piecewise linear probability density from an array of
        (non-negative) function values. The algorithm targets use with
        very large arrays that might include large regions of negligibly
        small values and should remain efficient in that case.

        Parameters
        ----------
        lo, hi : real
            If provided, these define the domain of the pdf, and `vals` is
            assumed to be values on a uniform grid over the domain. `absc`
            must not be provided if `lo` and `hi` are provided.

        absc : array_like
            If provided, this array specifies the argument (abscissa) for each
            value in `vals`. `lo` and `hi` must not be provided if `absc` is
            provided. `step` is ignored.

        vals : array_like
            Non-negative values defining the pdf. They need not be normalized.

        logvals : boolean
            If True, `vals` holds the logarithms of the (unnormalized)
            density values.

        cut : real
            Specifies a cutoff; density values a factor of `cut` below the
            maximum density will be treated as zero. Must be nonzero if
            `logvals` is True.
        """
        # Define the domain and abscissas.
        if not (lo is None) and not (hi is None):
            if step != 'lin':
                raise NotImplementedError('Only linear step is implemented!')
            self.lo, self.hi = lo, hi
            self.n = len(vals)
            self.step = 'lin'
            self.absc, self.delta = linspace(lo, hi, self.n, retstep=True)
        elif absc is None:
            raise ValueError('Must specify either lo and hi, or absc!')
        else:
            self.n = len(vals)
            if len(absc) != self.n:
                raise ValueError('Length mismatch between absc and vals!')
            self.lo, self.hi = absc[0], absc[-1]
            self.absc = absc
            self.step = None  # indicates arbitrary step sizes

        # Define and select the non-negligible values, 'weight values.'
        if logvals:
            wvals = vals - max(vals)
            selected = (wvals > log(cut)).nonzero()[0]
            negl = (wvals <= log(cut)).nonzero()[0]
            wvals[selected] = exp(wvals[selected])
            wvals[negl] = 0.
        else:
            wvals = vals/max(vals)
            selected = (wvals > cut).nonzero()[0]

        # Break up the selection into contiguous regions.
        # First get lists of indices to nonzero values in each region.
        regions = []
        region = [ selected[0] ]
        for n in selected[1:]:
            if n == region[-1]+1:
                region.append(n)
            else:
                regions.append(region)
                region = [n]
        regions.append(region)

        # Now adjust interior boundaries to include a bounding zero value.
        # Just keep track of the range spanned by each region.
        # Start with the first region, which may start at index 0.
        lo, hi = regions[0][0], regions[0][-1]
        if lo > 0 and wvals[lo] > 0.:  # must do 2nd test in case cut=0.
            lo -= 1
        if hi < self.n-1 and wvals[hi] > 0.:
            hi += 1
        ranges = [ (lo, hi) ]
        # Run over remaining regions.
        for region in regions[1:]:
            lo, hi = region[0], region[-1]
            if wvals[lo] > 0.:
                lo -= 1
            if hi < self.n-1 and wvals[hi] > 0.:
                hi += 1
            ranges.append((lo, hi))

        # Now, by region, count intervals and calculate weights for nonzero
        # intervals.
        self.n_intvls = 0
        self.regions = []
        nz_wts = []  # wts for regions with nonzero support
        nz_intvls = []
        # ... (remainder of this method omitted) ...
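The constructor above is truncated, and the pdf-evaluation and sampling methods are not shown. The sketch below illustrates the intended usage described by the class docstring; the method names pdf() and sample() are hypothetical stand-ins for whatever the omitted code actually defines.

# Hypothetical usage sketch; pdf() and sample() are assumed method names.
from numpy import linspace, exp

x = linspace(0., 10., 1001)
vals = exp(-0.5*(x - 4.)**2 / 0.25)   # an unnormalized Gaussian bump on the grid
pwl = PWLinear(lo=0., hi=10., vals=vals)
# density = pwl.pdf(4.)               # evaluate the pdf (assumed method)
# draws = pwl.sample(100)             # draw 100 samples (assumed method)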
Example 4: gibbs_sample
# Required import: from population import Population [as alias]
# Or: from population.Population import sample [as alias]
# Also assumes: import copy, numpy as np, plus the project helpers
# coord_descent and initialize_updates.
def gibbs_sample(population,
                 data,
                 N_samples=1000,
                 x0=None,
                 init_from_mle=True):
    """
    Sample the posterior distribution over parameters using MCMC.
    """
    N = population.model['N']

    # Draw initial state from prior if not given
    if x0 is None:
        x0 = population.sample()

        if init_from_mle:
            print "Initializing with coordinate descent"
            from models.model_factory import make_model, convert_model
            from population import Population

            mle_model = make_model('standard_glm', N=N)
            mle_popn = Population(mle_model)
            mle_popn.set_data(data)
            mle_x0 = mle_popn.sample()

            # Initialize with MLE under standard GLM
            mle_x0 = coord_descent(mle_popn, data, x0=mle_x0, maxiter=1)

            # Convert between inferred parameters of the standard GLM
            # and the parameters of this model, e.g. convert unweighted
            # networks to weighted networks with normalized impulse responses.
            x0 = convert_model(mle_popn, mle_model, mle_x0, population, population.model, x0)

    # Create updates for this population
    serial_updates, parallel_updates = initialize_updates(population)

    # DEBUG: Profile the Gibbs sampling loop
    import cProfile, pstats, StringIO
    pr = cProfile.Profile()
    pr.enable()

    # Alternate fitting the network and fitting the GLMs
    x_smpls = [x0]
    x = x0

    import time
    start_time = time.clock()
    for smpl in np.arange(N_samples):
        # Print the current log likelihood
        lp = population.compute_log_p(x)

        # Compute iters per second
        stop_time = time.clock()
        if stop_time - start_time == 0:
            print "Gibbs iteration %d. Iter/s exceeds time resolution. Log prob: %.3f" % (smpl, lp)
        else:
            print "Gibbs iteration %d. Iter/s = %f. Log prob: %.3f" % (smpl,
                                                                       1.0/(stop_time-start_time),
                                                                       lp)
        start_time = stop_time

        # Go through each parallel MH update
        for parallel_update in parallel_updates:
            for n in np.arange(N):
                parallel_update.update(x, n)

        # Sample the serial updates
        for serial_update in serial_updates:
            serial_update.update(x)

        x_smpls.append(copy.deepcopy(x))

    pr.disable()
    s = StringIO.StringIO()
    sortby = 'cumulative'
    ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
    ps.print_stats()
    with open('mcmc.prof.txt', 'w') as f:
        f.write(s.getvalue())

    return x_smpls
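A hedged end-to-end sketch of how gibbs_sample might be driven, reusing the synthetic-data helpers from Example 1; the gen_synth_data arguments and the time horizon are assumptions, not the project's documented interface.

# Sketch: driving gibbs_sample on synthetic data (helper signatures as in Example 1).
model = make_model('standard_glm', N=2)
popn = Population(model)
x_true = popn.sample()                          # ground-truth parameters from the prior
data = gen_synth_data(popn, x_true, 2, 60.0)    # N=2 neurons, T=60 (assumed units)
popn.set_data(data)
x_smpls = gibbs_sample(popn, data, N_samples=100)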
Example 5: Driver
# Required import: from population import Population [as alias]
# Or: from population.Population import sample [as alias]
# ... (earlier part of this class omitted) ...

    '''
    The main function of this class. Returns a 3-tuple: (distribution, spent, individuals_seen).
    'distribution' is a Population object representing what we learned about each type,
    'spent' is how much money we paid out, and 'individuals_seen' is how many individuals
    we talked to.
    '''
    def run(self):
        if self.constraint_type == COST:
            return self.run_cost_constraint()
        elif self.constraint_type == STEPS:
            return self.run_steps_constraint()
        elif self.constraint_type == BASELINE:
            return self.run_baseline()
        else:
            raise ValueError("Unknown constraint type")

    def get_distribution_for_type(self, priv_type):
        return self.population.distribution[priv_type]

    def update_costs(self, cost):
        self.costs.append(cost)

    def update_divergences(self, learned_pop):
        js_div = utils.joint_jsdivergence(self.population.distribution, learned_pop.distribution,
                                          self.population.probability, learned_pop.probability)
        self.divergences.append(js_div)

    def run_cost_constraint(self):
        budget = self.constraint_val
        spent = 0.0
        individuals_seen = 0
        count = [0 for i in range(self.population.num_types)]
        while spent < budget:
            # Sample an individual from the population
            (priv_type, cost) = self.population.sample()
            count[priv_type] += 1
            accepted, rejected_offer, accepted_offer = self.offer_process(cost)
            if accepted:
                print "Offer #" + str(individuals_seen) + ":\t" + str(rejected_offer) + " Rejected\t" + str(accepted_offer) + " Accepted"
                spent += accepted_offer
                ret_type = priv_type
            else:
                print "Offer #" + str(individuals_seen) + ":\t" + str(rejected_offer) + " Rejected"
                ret_type = OFFER_REJECTED
            self.learner.update(ret_type, rejected_offer, accepted_offer)
            individuals_seen += 1
            self.update_costs(spent)
            learned_pop = self.learner.get_prediction()
            self.update_divergences(learned_pop)

        #return (self.learner.get_prediction(), spent, individuals_seen, self.divergences)
        self.results["prediction"] = self.learner.get_prediction()
        self.results["spent"] = spent
        self.results["individuals_seen"] = individuals_seen
        return self.results

    def run_steps_constraint(self):
        steps = self.constraint_val
        spent = 0.0
        count = [0 for i in range(self.population.num_types)]
        for i in xrange(steps):
            # Sample an individual from the population
            (priv_type, cost) = self.population.sample()
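run_steps_constraint is cut off above. To contrast the two stopping rules it shares with run_cost_constraint, here is a minimal self-contained sketch with a stubbed sample() returning a (type, cost) pair; everything in it is illustrative and not part of the Driver class.

# Sketch: the two stopping rules, with a stand-in for population.sample().
import random

def sample_stub():
    # stand-in for population.sample(): returns (priv_type, cost)
    return random.randint(0, 2), random.uniform(0.5, 2.0)

# Cost constraint: stop once the budget is exhausted.
budget, spent, seen = 10.0, 0.0, 0
while spent < budget:
    priv_type, cost = sample_stub()
    spent += cost
    seen += 1

# Steps constraint: stop after a fixed number of individuals.
for i in xrange(20):
    priv_type, cost = sample_stub()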
Example 6: run_gen_synth_data
# Required import: from population import Population [as alias]
# Or: from population.Population import sample [as alias]
# Also assumes: import os, cPickle, numpy as np, plus the project helpers
# parse_cmd_line_args, make_model, stabilize_sparsity, check_stability,
# gen_synth_data, and plot_results.
def run_gen_synth_data():
    """ Run a test with synthetic data and MCMC inference
    """
    options, args = parse_cmd_line_args()

    # Create the model
    model = make_model(options.model, N=options.N)
    # Set the sparsity level to minimize the risk of unstable networks
    stabilize_sparsity(model)

    print "Creating master population object"
    popn = Population(model)

    # Sample random parameters from the model
    x_true = popn.sample()

    # Check stability of matrix
    assert check_stability(model, x_true, options.N), "ERROR: Sampled network is unstable!"

    # Save the model so it can be loaded alongside the data
    fname_model = os.path.join(options.resultsDir, 'model.pkl')
    print "Saving data to %s" % fname_model
    with open(fname_model, 'wb') as f:
        cPickle.dump(model, f, protocol=-1)

    print "Generating synthetic data with %d neurons and %.2f seconds." % \
          (options.N, options.T_stop)

    # Set simulation parameters
    dt = 0.001
    dt_stim = 0.1
    D_stim = 1
    stim = np.random.randn(int(options.T_stop/dt_stim), D_stim)

    data = gen_synth_data(options.N, options.T_stop, popn, x_true, dt, dt_stim, D_stim, stim)

    # Set the data so that the population state can be evaluated
    popn.set_data(data)

    # DEBUG: Check that the evaluated firing rate matches the simulated one
    state = popn.eval_state(x_true)
    for n in np.arange(options.N):
        lam_true = state['glms'][n]['lam']
        lam_sim = popn.glm.nlin_model.f_nlin(data['X'][:, n])
        assert np.allclose(lam_true, lam_sim)

    # Save the data for reuse
    #fname_mat = os.path.join(options.resultsDir, 'data.mat')
    #print "Saving data to %s" % fname_mat
    #scipy.io.savemat(fname_mat, data, oned_as='row')

    # Pickle the data so we can open it more easily
    fname_pkl = os.path.join(options.resultsDir, 'data.pkl')
    print "Saving data to %s" % fname_pkl
    with open(fname_pkl, 'wb') as f:
        cPickle.dump(data, f, protocol=-1)

    # Plot firing rates, stimulus responses, etc.
    plot_results(popn, data['vars'],
                 resdir=options.resultsDir,
                 do_plot_stim_resp=False,
                 do_plot_imp_responses=False)
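The assert in the example above aborts if the sampled network happens to be unstable. A common alternative, sketched here under the same assumed helpers and in the same scope as the example, is to resample until a stable draw is found, capped to avoid an infinite loop; this is an illustration, not part of the original script.

# Sketch: resample until check_stability passes (assumed helper signature as above).
max_tries = 20
for attempt in xrange(max_tries):
    x_true = popn.sample()
    if check_stability(model, x_true, options.N):
        break
else:
    raise RuntimeError("No stable network found in %d draws" % max_tries)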