This article collects typical usage examples of the scipy.polyfit function in Python. If you have been asking yourself how exactly to use Python's polyfit, how it is called, or what real code that uses it looks like, the hand-picked examples here should help.
Below are 15 code examples of the polyfit function, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
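Before the examples, a minimal sketch of the call pattern they all share may help: polyfit(x, y, deg) returns the polynomial coefficients from highest to lowest order, and poly1d wraps them in a callable polynomial. In older SciPy releases scipy.polyfit is simply a re-export of numpy.polyfit (recent releases drop the alias), so the sketch below uses numpy.polyfit as the drop-in equivalent; the data values are made up for illustration.

import numpy as np

# made-up sample data: y is roughly 2*x + 1 with a little noise
x = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
y = np.array([1.1, 2.9, 5.2, 7.1, 8.8])

coeffs = np.polyfit(x, y, 1)   # degree-1 (linear) fit -> array([slope, intercept])
line = np.poly1d(coeffs)       # callable polynomial built from the coefficients
print(coeffs)
print(line(2.5))               # evaluate the fitted line at x = 2.5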
Example 1: log_regress
def log_regress(f, xs, tol=0.1):
    """find root f(x) = 0 using log-linear regression, starting with xs"""
    last_log_xp = None
    log_xp = None
    print "initial seeding for log_regress"
    ys = map(f, xs)
    log_xs = map(log, xs)
    plotting = False
    while last_log_xp is None or abs(exp(log_xp) - exp(last_log_xp)) > tol:
        print "correlation:", pearsonr(log_xs, ys)
        # lin = poly1d(polyfit(log_xs, ys, 1))
        m, b = polyfit(log_xs, ys, 1)
        if plotting:
            lin = poly1d([m, b])
            plt.scatter(log_xs, ys)
            plt.plot(*pl(lin, log_xs))
            plt.show()
        last_log_xp = log_xp or None
        log_xp = -b/m  # secant_interval(lin, min(log_xs), max(log_xs))
        log_xs.append(log_xp)
        yxp = f(exp(log_xp))
        ys.append(yxp)
        print "x:", log_xp and exp(log_xp), "y:", ys[-1],\
            "x last:", last_log_xp and exp(last_log_xp),\
            "y last:", ys[-2] if len(ys) >= 2 else None
    # lin = poly1d(polyfit(log_xs, ys, 1))
    m, b = polyfit(log_xs, ys, 1)
    log_xp = -b/m  # secant_interval(lin, min(log_xs), max(log_xs))
    return exp(log_xp)
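A hypothetical call might look like the following sketch. The target function and seed points are invented, and the imports shown are the module-level names the snippet assumes (log and exp from math, polyfit from scipy, pearsonr from scipy.stats); with plotting left at False, the pl helper and matplotlib are never touched.

from math import log, exp
from scipy import polyfit
from scipy.stats import pearsonr

f = lambda x: log(x) - log(500)          # invented target: the root is at x = 500
seeds = [10.0, 100.0, 1000.0, 10000.0]   # initial sampling points
root = log_regress(f, seeds, tol=0.1)
print "estimated root:", root            # should come out close to 500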
Example 2: log_regress_spec
def log_regress_spec(f, xs, tol=0.01):
    """find root f(x) = 0 using log-linear regression, starting with xs"""
    # print "initial seeding for log_regress (unweighted)"
    ys = map(f, xs)
    log_xs = map(log, xs)
    plotting = False
    honest_guesses = []
    while len(honest_guesses) < 2 or abs(honest_guesses[-1] -
                                         honest_guesses[-2]) > tol:
        # print "correlation:", pearsonr(log_xs, ys)
        # lin = poly1d(polyfit(log_xs, ys, 1))
        m, b = polyfit(log_xs, ys, 1)
        # if plotting:
        #     lin = poly1d([m, b])
        #     plt.scatter(log_xs, ys)
        #     plt.plot(*pl(lin, log_xs))
        #     plt.show()
        honest_guess = -b/m
        dx = -(honest_guesses[-1] - honest_guess) if honest_guesses else 0
        log_xp = honest_guess + 2*dx
        log_xs.append(log_xp)
        yxp = f(exp(log_xp))
        ys.append(yxp)
        honest_guesses.append(honest_guess)
        diff = (abs(exp(honest_guesses[-1]) - exp(honest_guesses[-2]))
                if len(honest_guesses) > 1 else None)
        # print "honest_guess:", honest_guess, "xp:", log_xp,\
        #     "y:", yxp, "diff:", diff
    # lin = poly1d(polyfit(log_xs, ys, 1))
    m, b = polyfit(log_xs, ys, 1)
    log_xp = -b/m  # secant_interval(lin, min(log_xs), max(log_xs))
    print "final guess: log_xp:", log_xp
    return exp(log_xp)
Example 3: main
def main(dbname, dbtable, dbuser, dbpass):
    """Main script execution"""
    # Query database
    result = query_database(dbname, dbtable, dbuser, dbpass, 1, 900)
    # Break result up into separate columns
    frame_counts, image_scales, widths, heights, proc_times = zip(*result)
    # Movie area in square pixels and arcseconds
    area_px = np.array(widths) * np.array(heights)
    area_as = area_px * np.array(image_scales) ** 2
    # Limit outlier influence from server downtime, etc.
    times = np.array(proc_times)
    # Determine linear fit for number of frames and area
    (m1, b1) = polyfit(frame_counts, times, 1)
    (m2, b2) = polyfit(np.sqrt(area_px), times, 1)
    print "Linear fit for num frames: y = %fx + %f" % (m1, b1)
    print "Linear fit for movie size: y = %fx + %f" % (m2, b2)
    # Basic time estimation: weighted combination of the two linear fits
    w1 = 0.8
    w2 = 1 - w1
    basic_est = lambda x, y: w1 * (m1 * x + b1) + w2 * (m2 * y + b2)
    evaluate_estimation_fn(basic_est, frame_counts, np.sqrt(area_px), times)
Developer: Helioviewer-Project, Project: helioviewer.org_trunk_branch_from_bzr, Lines of code: 27, Source file: movie_usage_stats.py
Example 4: main
def main():
    data = sp.genfromtxt('./data/web_traffic.tsv', delimiter='\t')
    x = data[:, 0]
    y = data[:, 1]
    x = x[~sp.isnan(y)]
    y = y[~sp.isnan(y)]
    fp1 = sp.polyfit(x, y, 1)
    print('Model parameters for fp1 %s' % fp1)
    f1 = sp.poly1d(fp1)
    print('This is the error rate for fp1 %f' % error(f1, x, y))
    fp2 = sp.polyfit(x, y, 2)
    print('Model parameters for fp2 %s' % fp2)
    f2 = sp.poly1d(fp2)
    print('This is the error rate for fp2 %f' % error(f2, x, y))
    plt.scatter(x, y, color='pink')
    plt.title('My first impression')
    plt.xlabel('Time')
    plt.ylabel('#Hits')
    plt.xticks([w * 7 * 24 for w in range(10)], ['week %i' % w for w in range(10)])
    fx = sp.linspace(0, x[-1], 1000)
    plt.plot(fx, f1(fx), linewidth=3, color='cyan')
    plt.plot(fx, f2(fx), linewidth=3, linestyle='--', color='red')
    plt.legend(['d = %i' % f1.order, 'd = %i' % f2.order], loc='upper left')
    plt.autoscale(tight=True)
    plt.grid()
    plt.show()
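The snippet relies on an error helper that is not shown here. A plausible definition, consistent with how it is used (a scalar goodness-of-fit score for a fitted polynomial), is the summed squared residual; treat the exact form below as an assumption rather than the original author's code.

import scipy as sp

def error(f, x, y):
    # summed squared residuals of model f over the data points (x, y)
    return sp.sum((f(x) - y) ** 2)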
Example 5: time_fitting
def time_fitting(x_fit, y_fit):
    """Fit a linear relation to the x_fit and y_fit parameters.
    Returns the actual fit and the parameters of the fit.
    """
    import numpy as np
    x_fit = np.array(x_fit)
    y_fit = np.array(y_fit)
    ### First fit iteration and remove outliers
    POLY_FIT_ORDER = 1
    slope, intercept = scipy.polyfit(x_fit, y_fit, POLY_FIT_ORDER)
    fit = scipy.polyval((slope, intercept), x_fit)
    fit_sigma = fit.std()
    include_index = np.where(np.abs(fit - y_fit) < 1.5 * fit_sigma)[0]
    if len(include_index) < 4:
        return None, None, False
    ### Final fit
    x_fit_clipped = x_fit[include_index]
    y_fit_clipped = y_fit[include_index]
    parameters = scipy.polyfit(x_fit_clipped, y_fit_clipped, POLY_FIT_ORDER)
    fit = scipy.polyval(parameters, x_fit)
    return fit, parameters, True
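A hypothetical call with invented timing data is sketched below; it assumes scipy is imported at module level, since the function calls scipy.polyfit and scipy.polyval without importing scipy itself.

import scipy

# invented timing data: roughly linear, with one obvious outlier at x = 5
x = [1, 2, 3, 4, 5, 6, 7, 8]
y = [2.1, 4.0, 6.2, 8.1, 30.0, 12.2, 14.1, 15.9]

fit, parameters, ok = time_fitting(x, y)
if ok:
    print("slope, intercept:", parameters)   # the outlier is clipped before the final fit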
Example 6: fitKvsPower
def fitKvsPower(self, filename="PowerVGain.dat", zeroed="_0"):
    if not hasattr(self, "k_avg"):
        raise RuntimeError, "Must load all data and run calculate() before fitting"
    gain, self.power = loadtxt(filename, skiprows=1, unpack=True)
    if zeroed != None:
        gain_0, power_0 = loadtxt(filename.replace(".", zeroed + ".", 1), skiprows=1, unpack=True)
        self.power = self.power - power_0
        savetxt(filename.replace(".", "_subtract."), self.power, fmt="%.5f")
    if self.gain.tolist() != gain.tolist():
        raise ValueError, ("Laser power was not measured over the same gains as calibration "
                           "measurements:\nLaser gain: " + str(gain) +
                           "\nMeasurement gains: " + str(self.gain))
    self.kfitX = polyfit(self.power, self.k_avg[:, 0], 1)
    self.kfitY = polyfit(self.power, self.k_avg[:, 1], 1)
    print "Fit parameters for X axis:"
    print poly1d(self.kfitX, variable="power")
    print "\nFit parameters for Y axis:"
    print poly1d(self.kfitY, variable="power")
Example 7: trainingAndTesting
def trainingAndTesting(self):
    global train
    global x, y, xa, xb, ya, yb
    # separating training from testing data
    frac = 0.3
    split_idx = int(frac * len(xb))
    rangeX = range(len(xb))
    listX = list(rangeX)
    logging.info("delta : %i", len(set(rangeX).difference(listX)))
    shuffled = sp.random.permutation(list(range(len(xb))))
    test = sorted(shuffled[:split_idx])
    train = sorted(shuffled[split_idx:])
    fbt1 = sp.poly1d(sp.polyfit(xb[train], yb[train], 1))
    fbt2 = sp.poly1d(sp.polyfit(xb[train], yb[train], 2))
    print("fbt2(x)= \n%s" % fbt2)
    print("fbt2(x)-100,000= \n%s" % (fbt2 - 100000))
    print("fbt2(x)= \n%s" % (fbt2))
    fbt3 = sp.poly1d(sp.polyfit(xb[train], yb[train], 3))
    fbt10 = sp.poly1d(sp.polyfit(xb[train], yb[train], 10))
    fbt100 = sp.poly1d(sp.polyfit(xb[train], yb[train], 100))
    print("Test errors for only the time after inflection point")
    for f in [fbt1, fbt2, fbt3, fbt10, fbt100]:
        print("Error d=%i: %f" % (f.order, self.error(f, xb[test], yb[test])))
Example 8: dsi_adc
def dsi_adc(q_axis, data, mid_pos):
    """ TODO
    Returns
    -------
    ADC4, ADC8 : (I,J,K) volumes
        Returns TODO
    """
    # Fit each signal S with a polynomial and derive the ADC and the kurtosis
    # initialization of the output maps
    I, J, K, N = data.shape
    ADC4 = np.zeros((I, J, K))
    #Ku4 = np.zeros((I, J, K))
    #P04 = np.zeros((I, J, K))
    ADC8 = np.zeros((I, J, K))
    #Ku8 = np.zeros((I, J, K))
    #P08 = np.zeros((I, J, K))
    pc = -1
    count = 0
    n = I * J * K
    print "Polynomial fitting for each voxel signal over q-axis..."
    # loop throughout all the voxels of the volume
    for i in range(I):
        for j in range(J):
            for k in range(K):
                # Percent counter
                count = count + 1
                pcN = int(round(float(100 * count) / n))
                if pcN > pc and pcN % 1 == 0:
                    pc = pcN
                    print str(pc) + " %"
                # for the current voxel, extract the signal to be fitted
                S = data[i, j, k, :]
                # normalization with respect to the b0
                S = S / S[mid_pos]
                coeff = sp.polyfit(q_axis, S, 8)
                ADC8[i, j, k] = -coeff[-3] / (2 * math.pi * math.pi)
                # Ku8[i,j,k] = (6 * coeff[-5] / (coeff[-3] * coeff[-3])) - 3
                # temp = np.polyval(coeff, q_axis)
                # P08[i,j,k] = np.sum(temp)
                coeff = sp.polyfit(q_axis, S, 4)
                ADC4[i, j, k] = -coeff[-3] / (2 * math.pi * math.pi)
                # Ku4[i,j,k] = (6 * coeff[-5] / (coeff[-3] * coeff[-3])) - 3
                # temp = np.polyval(coeff, q_axis)
                # P04[i,j,k] = np.sum(temp)
    print "[ OK ]"
    return ADC4, ADC8
Example 9: generate_report
def generate_report():
    """See title"""
    word_file = open("word_file.txt", 'ab+')
    bigram_file = open("bigram_file.txt", 'ab+')
    sorted_words = sorted(words.items(), key=lambda x: x[1])
    sorted_bigrams = sorted(bigrams.items(), key=lambda x: x[1])
    print(sorted_words, file=word_file)
    print(sorted_bigrams, file=bigram_file)
    word_names, word_values = zip(*reversed(sorted_words))
    bigram_names, bigram_values = zip(*reversed(sorted_bigrams))
    # Create word plot
    xdata, ydata = range(1, len(word_values) + 1), word_values
    xd, yd = log10(xdata), log10(ydata)
    polycoef = polyfit(xd, yd, 1)
    yfit = 10 ** (polycoef[0] * xd + polycoef[1])
    plt.figure()
    plt.loglog(xdata, ydata, 'ro', xdata, yfit, 'g--')
    plt.title("Word frequency distribution")
    plt.ylabel("Word Frequency")
    plt.xlabel("Words")
    plt.grid()
    plt.savefig('Question1_wordplot.png')
    total_words = sum(word_values)
    word_percent = [float(wv) / total_words for wv in word_values]
    c_values = [x[0] * x[1] for x in zip(xdata, word_percent)]
    c = sum(c_values) / len(c_values)
    print("Word c: ", c)
    # Create bigram plot
    xdata, ydata = range(1, len(bigram_values) + 1), bigram_values
    xd, yd = log10(xdata), log10(ydata)
    polycoef = polyfit(xd, yd, 1)
    yfit = 10 ** (polycoef[0] * xd + polycoef[1])
    plt.figure()
    plt.loglog(xdata, ydata, 'ro', xdata, yfit, 'g--')
    plt.title("Bigram frequency distribution")
    plt.ylabel("Bigram Frequency")
    plt.xlabel("Bigrams")
    plt.grid()
    plt.savefig('Question1_bigramplot.png')
    total_bigrams = sum(bigram_values)
    bigram_percent = [float(bv) / total_bigrams for bv in bigram_values]
    c_values = [x[0] * x[1] for x in zip(xdata, bigram_percent)]
    c = sum(c_values) / len(c_values)
    print("Bigram c: ", c)
    return
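The log-log fit in this example amounts to fitting a power law of the form freq ≈ 10**intercept * rank**slope, which is the usual way to check Zipf-like behaviour. A standalone sketch of just that step, with made-up frequencies and numpy.polyfit standing in for the scipy alias:

import numpy as np

# made-up Zipf-like frequencies: roughly freq ~ 1000 / rank
ydata = np.array([1000.0, 480.0, 340.0, 260.0, 195.0, 170.0, 140.0, 125.0])
xdata = np.arange(1, len(ydata) + 1)

xd, yd = np.log10(xdata), np.log10(ydata)
slope, intercept = np.polyfit(xd, yd, 1)   # slope should be close to -1 for Zipf-like data
yfit = 10 ** (slope * xd + intercept)      # fitted power law, back on the original scale
print("slope:", slope)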
Example 10: go
def go(mags=(15, 30.1, 0.5), redshifts=(0.01, 12, 0.01),
       coeff_file="prior_K_zmax7_coeff.dat", outfile="prior_K_extend.dat"):
    fp = open(coeff_file)
    lines = fp.readlines()
    fp.close()
    mag_list = np.cast[float](lines[0].split()[2:])
    z0 = np.cast[float](lines[1].split()[1:])
    gamma = np.cast[float](lines[2].split()[1:])
    z_grid = np.arange(redshifts[0], redshifts[1], redshifts[2])
    NZ = z_grid.shape[0]
    mag_grid = np.arange(mags[0], mags[1], mags[2])
    NM = mag_grid.shape[0]
    #### Polynomial extrapolation not reliable
    # p_z0 = scipy.polyfit(mag_list, z0, order)
    # z0_grid = np.maximum(scipy.polyval(p_z0, mag_grid), 0.05)
    # p_gamma = scipy.polyfit(mag_list, gamma, order)
    # gamma_grid = np.maximum(scipy.polyval(p_gamma, mag_grid), 0.05)
    #### Interpolations on the defined grid
    z0_grid = np.interp(mag_grid, mag_list, z0)
    gamma_grid = np.interp(mag_grid, mag_list, gamma)
    #### Linear extrapolations of fit coefficients
    p_z0 = scipy.polyfit(mag_list[:3], z0[:3], 1)
    z0_grid[mag_grid < mag_list[0]] = np.maximum(scipy.polyval(p_z0, mag_grid[mag_grid < mag_list[0]]), 0.05)
    p_z0 = scipy.polyfit(mag_list[-3:], z0[-3:], 1)
    z0_grid[mag_grid > mag_list[-1]] = np.maximum(scipy.polyval(p_z0, mag_grid[mag_grid > mag_list[-1]]), 0.05)
    p_gamma = scipy.polyfit(mag_list[:3], gamma[:3], 1)
    gamma_grid[mag_grid < mag_list[0]] = np.maximum(scipy.polyval(p_gamma, mag_grid[mag_grid < mag_list[0]]), 0.05)
    p_gamma = scipy.polyfit(mag_list[-3:], gamma[-3:], 1)
    gamma_grid[mag_grid > mag_list[-1]] = np.maximum(scipy.polyval(p_gamma, mag_grid[mag_grid > mag_list[-1]]), 0.05)
    out_matrix = np.zeros((NZ, NM + 1))
    out_matrix[:, 0] = z_grid
    for i in range(NM):
        pz = z_grid * np.exp(-(z_grid / z0_grid[i]) ** gamma_grid[i])
        pz /= np.trapz(pz, z_grid)
        plt.plot(z_grid, pz, label=mag_grid[i])
        out_matrix[:, i + 1] = pz
    plt.legend(ncol=3, loc="upper right")
    header = "# z "
    for m in mag_grid:
        header += "%6.1f" % (m)
    fp = open(outfile, "w")
    fp.write(header + "\n")
    np.savetxt(fp, out_matrix, fmt="%6.3e")
    fp.close()
Example 11: concat_extrap_ends
def concat_extrap_ends(x, npts, polyorder=1, lowside=True, highside=True):
    i = numpy.arange(npts, dtype='float64')
    if lowside:
        ans = scipy.polyfit(-1 * (i + 1.), x[:npts], polyorder)
        x = numpy.concatenate([scipy.polyval(list(ans), i[::-1]), x])
    if highside:
        ans = scipy.polyfit(-1 * (i[::-1] - 1.), x[-1 * npts:], polyorder)
        x = numpy.concatenate([x, scipy.polyval(list(ans), i)])
    return x
Example 12: estimateBeta
def estimateBeta(priceY, priceX, algo='standard'):
    '''
    estimate stock Y vs stock X beta using iterative linear
    regression. Outliers outside the 3 sigma boundary are filtered out

    Parameters
    --------
    priceX : price series of x (usually market)
    priceY : price series of y (estimate beta of this price)

    Returns
    --------
    beta : stock Y beta relative to stock X
    '''
    X = pd.DataFrame({'x': priceX, 'y': priceY})
    if algo == 'returns':
        ret = (X / X.shift(1) - 1).dropna().values
        x = ret[:, 0]
        y = ret[:, 1]
        # filter high values
        low = np.percentile(x, 20)
        high = np.percentile(x, 80)
        iValid = (x > low) & (x < high)
        x = x[iValid]
        y = y[iValid]
        iteration = 1
        nrOutliers = 1
        while iteration < 10 and nrOutliers > 0:
            (a, b) = polyfit(x, y, 1)
            yf = polyval([a, b], x)
            # plot(x, y, 'x', x, yf, 'r-')
            err = yf - y
            idxOutlier = abs(err) > 3 * np.std(err)
            nrOutliers = sum(idxOutlier)
            beta = a
            # print 'Iteration: %i beta: %.2f outliers: %i' % (iteration, beta, nrOutliers)
            x = x[~idxOutlier]
            y = y[~idxOutlier]
            iteration += 1
    elif algo == 'log':
        x = np.log(X['x'])
        y = np.log(X['y'])
        (a, b) = polyfit(x, y, 1)
        beta = a
    elif algo == 'standard':
        ret = np.log(X).diff().dropna()
        beta = ret['x'].cov(ret['y']) / ret['x'].var()
    else:
        raise TypeError("unknown Beta algorithm type, use 'standard', 'log' or 'returns'")
    return beta
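For the 'standard' branch, beta is the covariance of the log returns divided by the variance of the market returns, which is exactly the slope an ordinary least-squares line fit would give. The sketch below, on invented return series, shows polyfit and the covariance formula agreeing; the 1.5 factor and the noise levels are arbitrary.

import numpy as np

np.random.seed(0)
x_ret = np.random.normal(0.0, 0.010, 500)                 # invented market returns
y_ret = 1.5 * x_ret + np.random.normal(0.0, 0.005, 500)   # stock moves ~1.5x the market

slope, intercept = np.polyfit(x_ret, y_ret, 1)            # regression slope
beta = np.cov(x_ret, y_ret)[0, 1] / np.var(x_ret, ddof=1) # covariance / variance
print(slope, beta)                                        # both should be close to 1.5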
Example 13: plotFullModel
def plotFullModel(dm, sacc):
    """
    """
    fig = plt.figure(figsize=(15, 5))
    plt.suptitle("Sacc = %s" % sacc)
    ax1 = plt.subplot(141)
    lCols = ["blue", "red", "orange", "yellow", "green", "pink"]
    xVar = "sacc%s_ex" % sacc
    yVar = "sacc%s_ey" % sacc
    dm = dm.select("%s != ''" % xVar)
    dm = dm.select("%s != ''" % yVar)
    dm = dm.select("%s != -1000" % xVar)
    dm = dm.select("%s != -1000" % yVar)
    for a in dm.unique("realAngle"):
        _dm = dm.select("realAngle == %s" % a)
        col = lCols.pop()
        plt.scatter(_dm[xVar], _dm[yVar], color=col, marker=".", label=a)
    plt.axvline(constants.xCen, color=gray[3], linestyle='--')
    plt.axhline(constants.yCen, color=gray[3], linestyle='--')
    ax2 = plt.subplot(142)
    pm = PivotMatrix(dm, ["stim_type", "realAngle"], ["file"], "xNorm1", colsWithin=True)
    pm.linePlot(fig=fig)
    pm.save("PM.csv")
    pm._print()
    plt.axhline(0, color=gray[3], linestyle="--")
    ax3 = plt.subplot(143)
    plt.title("object")
    dmObj = dm.select("stim_type == 'object'")
    # slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(stimObj["ecc"], stimObj["xNorm1"])
    x = dmObj["ecc"]
    y = dmObj["xNorm%s" % sacc]
    fit = scipy.polyfit(x, y, 1)
    fit_fn = scipy.poly1d(fit)
    plt.plot(x, y, 'yo', x, fit_fn(x), '--k')
    ax4 = plt.subplot(144)
    plt.title("non-object")
    dmNo = dm.select("stim_type == 'non-object'")
    # slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(stimObj["ecc"], stimObj["xNorm1"])
    x = dmNo["ecc"]
    y = dmNo["xNorm%s" % sacc]
    fit = scipy.polyfit(x, y, 1)
    fit_fn = scipy.poly1d(fit)
    plt.plot(x, y, 'yo', x, fit_fn(x), '--k')
    plt.savefig("./plots/Full_model_004C_sacc%s.png" % sacc)
    plt.show()
Example 14: logaod_polyfit
def logaod_polyfit(wvl, aod, polynum=False):
    """
    Purpose:
        Take in an array of aod spectra and the corresponding wavelengths, and
        calculate the polynomial coefficients associated with the log of each spectrum
    Input:
        aod: numpy array of time,wvl
        wvl: numpy array of wavelengths in nm
    Output:
        array of polynomial coefficients for each time point in the log(aod)
    Keywords:
        polynum: (optional, defaults to N-2 where N is the number of points in the spectrum)
                 order of the polynomial to fit (polynum+1 coefficients are returned);
                 if set to False, uses the default N-2
    Dependencies:
        - numpy
        - scipy
    Needed Files:
        - None
    Modification History:
        Written: Samuel LeBlanc, NASA Ames Research Center, 2017-11-28
        Modified:
    """
    import numpy as np
    from scipy import polyfit
    # sanitize inputs
    shape = aod.shape
    wvl = np.array(wvl)
    if not len(wvl.shape) == 1:
        raise ValueError('wvl is not an array with one dimension')
    if not len(wvl) in shape:
        raise ValueError('No size of aod is the same as wvl')
    if not polynum:
        polynum = len(wvl) - 2
    if len(shape) > 1:
        ni, n = [(i, j) for (i, j) in enumerate(shape) if not j == len(wvl)][0]
        # polyfit of order polynum returns polynum+1 coefficients
        cc = np.zeros((polynum + 1, n))
        for i in range(n):
            if ni == 0:
                cc[:, i] = polyfit(np.log(wvl), np.log(aod[i, :]), polynum)
            else:
                cc[:, i] = polyfit(np.log(wvl), np.log(aod[:, i]), polynum)
    else:
        cc = polyfit(np.log(wvl), np.log(aod), polynum)
    return cc
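A hypothetical call on a single invented spectrum (an Angstrom-like power law) is sketched below; polynum=2 requests a quadratic in log-log space, so three coefficients come back, highest order first.

import numpy as np

wvl = np.array([380.0, 452.0, 501.0, 675.0, 870.0])   # wavelengths in nm
aod = 0.15 * (wvl / 500.0) ** -1.3                     # invented Angstrom-like AOD spectrum

cc = logaod_polyfit(wvl, aod, polynum=2)               # quadratic fit of log(aod) vs log(wvl)
print(cc)                                              # three coefficients, highest order first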
Example 15: estimateBeta
def estimateBeta(priceY, priceX, algo="standard"):
    """
    estimate stock Y vs stock X beta using iterative linear
    regression. Outliers outside the 3 sigma boundary are filtered out

    Parameters
    --------
    priceX : price series of x (usually market)
    priceY : price series of y (estimate beta of this price)

    Returns
    --------
    beta : stock Y beta relative to stock X
    """
    X = DataFrame({"x": priceX, "y": priceY})
    if algo == "returns":
        ret = (X / X.shift(1) - 1).dropna().values
        # print len(ret)
        x = ret[:, 0]
        y = ret[:, 1]
        iteration = 1
        nrOutliers = 1
        while iteration < 10 and nrOutliers > 0:
            (a, b) = polyfit(x, y, 1)
            yf = polyval([a, b], x)
            # plot(x, y, 'x', x, yf, 'r-')
            err = yf - y
            idxOutlier = abs(err) > 3 * np.std(err)
            nrOutliers = sum(idxOutlier)
            beta = a
            # print 'Iteration: %i beta: %.2f outliers: %i' % (iteration, beta, nrOutliers)
            x = x[~idxOutlier]
            y = y[~idxOutlier]
            iteration += 1
    elif algo == "log":
        x = np.log(X["x"])
        y = np.log(X["y"])
        (a, b) = polyfit(x, y, 1)
        beta = a
    elif algo == "standard":
        ret = np.log(X).diff().dropna()
        beta = ret["x"].cov(ret["y"]) / ret["x"].var()
    else:
        raise TypeError("unknown algorithm type, use 'standard', 'log' or 'returns'")
    return beta