This article collects typical usage examples of the numpy.histogramdd function in Python. If you are unsure how histogramdd works, what its arguments do, or what real calls to it look like, the hand-picked examples below should help.
Fifteen code examples of histogramdd are shown below, sorted by popularity by default.
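Before the examples, here is a minimal, self-contained sketch of the basic numpy.histogramdd call (the sample data is made up for illustration): it histograms N points in D dimensions and returns the D-dimensional count array together with a list of bin-edge arrays.

import numpy as np

rng = np.random.default_rng(0)
sample = rng.normal(size=(1000, 3))      # 1000 points in 3-D

# H is a (5, 8, 4) array of counts; edges is a list of three bin-edge arrays.
H, edges = np.histogramdd(sample, bins=(5, 8, 4))

# Weighted and normalized variants work the same way.
weights = rng.uniform(size=1000)
density, _ = np.histogramdd(sample, bins=10, weights=weights, density=True)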
Example 1: bin_sum
def bin_sum(r, f, bins=10):
"""Binned sum of function f(r)
Parameters:
r: independent variable to be binned over
f: function to be summed
    bins (default 10): number of bins, or an array of bin edges of length `nbins + 1`
Returns:
total: the total value per bin
count: number of values summed per bin (histogram)
bins: bin edges
"""
multi = isinstance(f, tuple)
    if bins == 1:
if r.dtype.kind not in 'iu':
assert np.allclose(r, np.around(r)), 'need integer array for bins=1'
            print('converting to int array')
r = r.astype(int)
count = np.bincount(r)
if multi:
total = [np.bincount(r, weights=fi) for fi in f]
else:
total = np.bincount(r, weights=f)
bins = np.arange(len(count)+1)
else:
count, bins = np.histogramdd(r, bins)
if multi:
total = [np.histogramdd(r, bins, weights=fi)[0] for fi in f]
else:
total = np.histogramdd(r, bins, weights=f)[0]
if len(bins) == 1:
bins = bins[0]
return total, count.astype(int), bins
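A hypothetical usage sketch for bin_sum above (the arrays r and f are invented for illustration); it shows the binned sum, the per-bin count, and a per-bin mean derived from them:

import numpy as np

r = np.random.uniform(0, 10, size=1000)   # independent variable
f = np.sin(r)                             # values to accumulate per bin
total, count, edges = bin_sum(r, f, bins=20)
# Per-bin mean of f, guarding against empty bins.
mean = total / np.where(count > 0, count, 1)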
Example 2: myhist
def myhist(x,weights=None,**histkw):
"""
Multidimensional histogram with option for multidimensional weights
"""
    # Parse input
x=mmlpars.mml_pars(x,type=np.ndarray)
xshp=x.shape ; N=xshp[0]
# Histogram w/o weights
    if weights is None: hist,bins=np.histogramdd(x,**histkw)
# Histogram w/ weights
else:
wshp=weights.shape
        # Histogram w/ scalar weights
if wshp==(N,): hist,bins=np.histogramdd(x,weights=weights,**histkw)
# Histogram w/ vector weights
else:
# Handle errors
if wshp[0]!=N: raise Exception('Weights must have same size first dimension as data. (data.shape={},weights.shape={})'.format(xshp,wshp))
if len(wshp)>2: raise Exception('Weights with more than 2 dimensions not supported. (weights.shape={})'.format(wshp))
            # Get histograms, allocating the output array once the shape and
            # bin edges of the first histogram are known
            for iw in range(wshp[1]):
                ihist,bins=np.histogramdd(x,weights=weights[:,iw],**histkw)
                if iw==0:
                    histkw['bins']=bins
                    hist=np.zeros(tuple(list(ihist.shape)+[wshp[1]]))
                hist[...,iw]=ihist
# Return output
return hist,bins
Example 3: grid_21cm
def grid_21cm(self,data=None):
deltaT=np.empty_like(data)
    for i in range(data.shape[0]):  # one slice per frequency channel
deltaT[i]=data[i]-data[i].mean()
nn=np.histogramdd(self.DPM,bins=(self.bin_x,self.bin_y,self.bin_z))[0]
T,edges=np.histogramdd(self.DPM,bins=(self.bin_x,self.bin_y,self.bin_z),weights=deltaT.reshape(-1))
return T
Example 4: bin_by_mean
def bin_by_mean(lon, lat, z, bins=10, range=None):
    # bins and range are given in (x, y) order as two-element sequences;
    # reverse them to (y, x) to match the (lat, lon) sample order below.
    bins = bins[::-1]
    range = range[::-1]
w_sum, _ = np.histogramdd((lat, lon), weights=z, bins=bins, range=range)
n_pts, edges = np.histogramdd((lat, lon), bins=bins, range=range)
n_pts[n_pts==0] = np.nan
return (w_sum/n_pts), n_pts, edges[1], edges[0]
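A hypothetical usage sketch for bin_by_mean above (the lon, lat and z arrays are invented); note that bins and range are passed in (x, y) order as two-element sequences, since the function reverses them before handing the (lat, lon) samples to histogramdd:

import numpy as np

lon = np.random.uniform(-180, 180, 1000)
lat = np.random.uniform(-90, 90, 1000)
z = np.random.randn(1000)

mean_z, n_pts, lon_edges, lat_edges = bin_by_mean(
    lon, lat, z, bins=(36, 18), range=((-180, 180), (-90, 90)))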
Example 5: test3
def test3(self):
    print("Testing the user interface, random base_distribution_type")
number_of_particles = 10000
test_grid = self.setup_simple_grid()
sph_particles = convert_grid_to_SPH(test_grid, number_of_particles,
base_distribution_type = "random", seed = 12345)
self.assertEqual(len(sph_particles), number_of_particles)
self.assertAlmostEqual(sph_particles.mass.sum(), 1.5 | units.kg)
self.assertAlmostEqual(sph_particles.velocity, [3.0, 4.0, 0.0] | units.m/units.s)
self.assertAlmostEqual(sph_particles.u, 1.0 | (units.m/units.s)**2)
# For 'random', the number of particles in a cell should scale only on average
# with the amount of mass in the cell:
self.assertAlmostRelativeEqual(
((1.5 | units.kg)/number_of_particles * numpy.histogramdd(
sph_particles.position.value_in(units.m), bins=(4,3,2))[0]).sum(),
(test_grid.rho*test_grid.cellsize().prod()).sum(),
places = 2
)
self.assertRaises(AssertionError,
self.assertAlmostRelativeEqual,
(1.5 | units.kg)/number_of_particles * numpy.histogramdd(sph_particles.position.value_in(units.m), bins=(4,3,2))[0],
test_grid.rho*test_grid.cellsize().prod(),
places = 2,
)
self.assertAlmostEqual(sph_particles.h_smooth, (50.0/number_of_particles)**(1.0/3) | units.m)
Example 6: __init__
def __init__(self, samples, recovery, bins=32, range=None,
transit_lnprob_function=None):
# Make sure that the samples have the correct format.
samples = np.atleast_2d(samples)
# Compute the recovery and injection histograms.
img_all, self.bins = np.histogramdd(samples, bins=bins, range=range)
img_yes, tmp = np.histogramdd(samples[recovery], bins=self.bins)
self.setup()
# Compute the completeness asserting zero completeness where there
# were no injections.
lncompleteness = -np.inf + np.zeros(img_yes.shape, dtype=float)
m = img_all > 0
lncompleteness[m] = np.log(img_yes[m]) - np.log(img_all[m])
# Compute the transit probability if a function was given.
if transit_lnprob_function is None:
lnprob = np.array(lncompleteness)
else:
args = np.meshgrid(*(self.bin_centers), indexing="ij")
transit_lnprob = transit_lnprob_function(*args)
lnprob = lncompleteness + transit_lnprob
# Expand the completeness and probability grids to have zeros around
# the edges.
self.lncompleteness = -np.inf + np.zeros(np.array(img_yes.shape)+2,
dtype=float)
    self.lncompleteness[tuple([slice(1, -1)] * len(self.bins))] = lncompleteness
self.lnprob = -np.inf + np.zeros(np.array(img_yes.shape)+2,
dtype=float)
    self.lnprob[tuple([slice(1, -1)] * len(self.bins))] = lnprob
Example 7: spline_fit
def spline_fit(data,
bins = None,
range = None,
weights = None,
order = None,
filename = 'spline.fits'):
    if weights is None:
        weights = np.ones(len(data))
    if bins is None:
        bins = data.shape[1]*[10]
    counts,bin_arrays = np.histogramdd(data,bins=bins,range=range,weights=weights)
    vars,bin_arrays = np.histogramdd(data,bins=bins,range=range,weights=weights**2)
coords = [(b[1:]+b[:-1])/2. for b in bin_arrays]
    if order is None:
order = list(np.zeros_like(bins))
knots = pad_knots(bin_arrays, order)
w = 1./np.sqrt(vars)
w[~np.isfinite(w)] = np.nanmin(w)
result = glam.fit(counts,w,coords,knots,order,0)
    if filename is not None:
        if os.path.exists(filename):
            os.remove(filename)
if filename[-5:]=='.fits':
splinefitstable.write(result,filename)
else:
splinefitstable.write(result,filename+'.fits')
return result
Example 8: transfer_entropy
def transfer_entropy(ts1, ts2, lag=2, bins=5):
    """ Transfer entropy D_1<-2 (information transferred from ts2 to ts1) """
ts1, lts1 = multi_lag(ts1, lag)
ts2, lts2 = multi_lag(ts2, lag)
# P(i_n+1, i_(n), j_(n))
joint = np.histogramdd([ts1] + lts1 + lts2, bins=bins)[0]
joint = normalize(joint)
# P(i_n+1, i_(n))
auto = np.histogramdd([ts1] + lts1, bins=bins)[0]
auto = normalize(auto)
# P(i_(n))
lag1 = np.histogramdd(lts1, bins=bins)[0]
lag1 = normalize(lag1)
# P(i_(n), j_(n))
lag12 = np.histogramdd(lts1 + lts2, bins=bins)[0]
lag12 = normalize(lag12)
# P(i_n+1 | i_(n), j_(n))
jcond = np.divide(joint.T, lag12.T).T
jcond = clean(jcond)
jcond = do_cpdf(jcond.T, avg_zeros).T
# P(i_n+1 | i_(n))
acond = np.divide(auto.T, lag1.T).T
acond = clean(acond)
acond = do_cpdf(acond.T, avg_zeros).T
# E[log P(i_n+1 | i_(n), j_(n)) / P(i_n+1 | i_(n))]
transfer = joint * clean(np.log(np.divide(jcond, acond)))
return transfer.sum()
Example 9: MeasureColorVector
def MeasureColorVector(self,img,HistSizes):
    """ Returns a color vector obtained by histogramming the pixel colors,
    with the number of bins given in HistSizes for each color layer """
    ImgX,ImgY,ImgZ = img.shape
    # First define boolean masks for the partitions of the image;
    # img[Mask] then yields an (N, ImgZ) array of color samples for histogramdd.
    # density=True replaces the normed=True keyword removed from newer numpy.
    Mask0 = np.zeros((ImgX,ImgY), dtype=bool)
    Mask0[ImgX//3:2*ImgX//3, ImgY//3:2*ImgY//3] = True  # Central 1/3 region rectangle
    Vector0 = np.histogramdd(img[Mask0], bins=HistSizes, density=True)[0].ravel()
    Mask1 = np.zeros((ImgX,ImgY), dtype=bool)
    Mask1[0:ImgX//2, 0:ImgY//2] = True  # 1st quadrant
    Mask1 &= ~Mask0
    Vector1 = np.histogramdd(img[Mask1], bins=HistSizes, density=True)[0].ravel()
    Mask2 = np.zeros((ImgX,ImgY), dtype=bool)
    Mask2[ImgX//2:, 0:ImgY//2] = True  # 2nd quadrant
    Mask2 &= ~Mask0
    Vector2 = np.histogramdd(img[Mask2], bins=HistSizes, density=True)[0].ravel()
    Mask3 = np.zeros((ImgX,ImgY), dtype=bool)
    Mask3[0:ImgX//2, ImgY//2:] = True  # 3rd quadrant
    Mask3 &= ~Mask0
    Vector3 = np.histogramdd(img[Mask3], bins=HistSizes, density=True)[0].ravel()
    Mask4 = np.zeros((ImgX,ImgY), dtype=bool)
    Mask4[ImgX//2:, ImgY//2:] = True  # 4th quadrant
    Mask4 &= ~Mask0
    Vector4 = np.histogramdd(img[Mask4], bins=HistSizes, density=True)[0].ravel()
    return np.concatenate((Vector0,Vector1,Vector2,Vector3,Vector4))
Example 10: Get_2DTProfile
def Get_2DTProfile(ar1, ar2, ar3, nbBinsX, nbBinsY,we):
    '''
    Weighted mean and error of ar3, binned on a (nbBinsX, nbBinsY) grid over (ar1, ar2)
    '''
    d = numpy.column_stack((ar1,ar2,ar3))
number, axis = numpy.histogramdd( d, (nbBinsX,nbBinsY,1))
weight, axis = numpy.histogramdd( d, (nbBinsX,nbBinsY,1), weights=we )
mean, axis = numpy.histogramdd( d, (nbBinsX,nbBinsY,1), weights=we*ar3)
err, axis = numpy.histogramdd( d, (nbBinsX,nbBinsY,1), weights=we*(ar3**2.))
mean /= weight
err = numpy.sqrt((err/weight-mean**2.)/number)
mean = mean[:,:,0]
err = err[:,:,0]
number = number[:,:,0]
### find the axis X
#axisX = axis[0]
#axisX = numpy.array([ axisX[i]+(axisX[i+1]-axisX[i])/2. for i in range(0,axisX.size-1) ])
### find the axis Y
#axisY = axis[1]
#axisY = numpy.array([ axisY[i]+(axisY[i+1]-axisY[i])/2. for i in range(0,axisY.size-1) ])
### For test look at the histo
#plt.imshow(mean,origin='lower',extent=[0., 10., 0., 10.],interpolation='None')
#cbar = plt.colorbar()
#plt.show()
return mean, err, number
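A hypothetical usage sketch for Get_2DTProfile above (all input arrays are invented): it computes the weighted mean and error of a third variable on a 10x10 grid over the first two.

import numpy

x = numpy.random.uniform(0., 10., 10000)
y = numpy.random.uniform(0., 10., 10000)
v = numpy.sin(x) + numpy.cos(y)
w = numpy.ones_like(v)

mean, err, number = Get_2DTProfile(x, y, v, nbBinsX=10, nbBinsY=10, we=w)
print(mean.shape, err.shape, number.shape)   # (10, 10) each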
Example 11: chi2div
def chi2div(y, x, idx1, idx2, edges):
n1, edges1 = np.histogramdd([y[idx1], x[idx1]], bins=edges)
n2, edges2 = np.histogramdd([y[idx2], x[idx2]], bins=edges)
    chi2d = np.sum(len(idx1)*pow(n1 - n2, 2)/(2*n2*(len(idx1)-n2)))
#chi2 = m*(n1 - n2).^2./(2*n2.*(m-n2));
return chi2d
Example 12: bin_by_mean
def bin_by_mean(lon, lat, z, bins=10, range=None):
bins = bins[::-1]
range = range[::-1]
wsum, _ = np.histogramdd((lat, lon), weights=z, bins=bins, range=range)
ppbin, edges = np.histogramdd((lat, lon), bins=bins, range=range)
#ppbin[ppbin==0] = np.nan
#ppbin = np.ma.masked_equal(ppbin, 0)
return (wsum/ppbin), ppbin, edges[1], edges[0]
Example 13: test_filter_minmax
def test_filter_minmax(self):
"""
"""
result_c = histogramnd(self.sample,
self.histo_range,
self.n_bins,
weights=self.weights,
last_bin_closed=True,
weight_min=self.filter_min,
weight_max=self.filter_max)
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!
filter_min = self.dtype_weights(self.filter_min)
filter_max = self.dtype_weights(self.filter_max)
weight_idx = _get_in_range_indices(self.weights,
filter_min, # <------ !!!
filter_max, # <------ !!!
minop=operator.ge,
maxop=operator.le)
result_np = np.histogramdd(self.sample[weight_idx],
bins=self.n_bins,
range=self.histo_range)
result_np_w = np.histogramdd(self.sample[weight_idx],
bins=self.n_bins,
range=self.histo_range,
weights=self.weights[weight_idx])
# comparing "hits"
hits_cmp = np.array_equal(result_c[0],
result_np[0])
# comparing weights
weights_cmp = np.array_equal(result_c[1], result_np_w[0])
self.assertTrue(hits_cmp)
self.assertTrue(weights_cmp)
bins_min = [rng[0] for rng in self.histo_range]
bins_max = [rng[1] for rng in self.histo_range]
inrange_idx = _get_in_range_indices(self.sample[weight_idx],
bins_min,
bins_max,
minop=operator.ge,
maxop=operator.le)
inrange_idx = weight_idx[inrange_idx]
self.assertEqual(result_c[0].sum(), len(inrange_idx),
msg=self.state_msg)
# we have to sum the weights using the same precision as the
# histogramnd function
weights_sum = self.weights[inrange_idx].astype(result_c[1].dtype).sum()
self.assertTrue(self.array_compare(result_c[1].sum(), weights_sum),
msg=self.state_msg)
Example 14: feature_dist
def feature_dist(self):
pcounts, e = np.histogramdd(self.pfeatures.view((np.float64, len(self.pfeatures.dtype.names))), bins=self.edges[:-1])
hcounts, e = np.histogramdd(self.hfeatures.view((np.float64, len(self.hfeatures.dtype.names))), bins=self.edges[:-1])
#Probability that a halo is present at a particle with features in a particular bin of feature space
#is the number of halos in that bin of feature space over the number of particles in that bin of
#feature space
self.php = hcounts/pcounts
Example 15: test_inf_edges
def test_inf_edges(self):
"""Test using +/-inf bin edges works. See #1788."""
x = np.arange(6).reshape(3, 2)
expected = np.array([[1, 0], [0, 1], [0, 1]])
h, e = np.histogramdd(x, bins=[3, [-np.inf, 2, 10]])
assert_allclose(h, expected)
h, e = np.histogramdd(x, bins=[3, np.array([-1, 2, np.inf])])
assert_allclose(h, expected)
h, e = np.histogramdd(x, bins=[3, [-np.inf, 3, np.inf]])
assert_allclose(h, expected)