per.andreas.brodtkorb 13 years ago
parent 02f8fbf949
commit 25b47ebc04

@@ -3471,12 +3471,12 @@ def _logitinv(x):
     return 1.0/(np.exp(-x)+1)

-def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj'):
+def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj', plotlog=False):
     import scipy.stats as st
     from sg_filter import SavitzkyGolay
     dist = st.norm
-    scale1 = 0.7
-    loc1 = 2
+    scale1 = 0.3
+    loc1 = 0
     norm1 = dist.pdf(-loc1, loc=-loc1, scale=scale1) + dist.pdf(-loc1, loc=loc1, scale=scale1)
     fun1 = lambda x: (dist.pdf(x, loc=-loc1, scale=scale1) + dist.pdf(x, loc=loc1, scale=scale1))/norm1
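For reference, fun1 above is the "true" success-probability curve the demo later tries to recover. A minimal standalone sketch (plain numpy/scipy; the helper name make_fun1 is only illustrative) shows how this parameter change moves the curve from two bumps at +-2 to a single narrow bump at 0:

    import numpy as np
    import scipy.stats as st

    def make_fun1(loc1, scale1):
        dist = st.norm
        # normalising constant so the curve equals 1 at x = -loc1
        norm1 = dist.pdf(-loc1, loc=-loc1, scale=scale1) + dist.pdf(-loc1, loc=loc1, scale=scale1)
        return lambda x: (dist.pdf(x, loc=-loc1, scale=scale1) +
                          dist.pdf(x, loc=loc1, scale=scale1)) / norm1

    x = np.linspace(-3, 3, 7)
    old = make_fun1(loc1=2, scale1=0.7)(x)   # two bumps at +-2 (old parameters)
    new = make_fun1(loc1=0, scale1=0.3)(x)   # single bump at 0 (new parameters)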
@@ -3501,18 +3501,25 @@ def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj'):
     y = yi[i]
     xmin, xmax = x.min(), x.max()
-    ni = int(2*(xmax-xmin)/hopt)
+    ni = 2*int((xmax-xmin)/hopt)+1
     print(ni)
     print(xmin, xmax)
     xi = np.linspace(xmin-hopt, xmax+hopt, ni)
-    xiii = np.linspace(xmin-hopt, xmax+hopt, 4*ni)
+    xiii = np.linspace(xmin-hopt, xmax+hopt, 4*ni+1)
+    kreg = KRegression(x, y, hs=hs*0.5)
+    fi = kreg(xi)
+    f = kreg(xiii, output='plotobj', title='Kernel regression n=%d, %s=%g' % (n, fun, hs), plotflag=1)
     c = gridcount(x, xi)
     c0 = gridcount(x[y==True], xi)
     yi = np.where(c==0, 0, c0/c)
-    yi[yi==0] = 1.0/(c.min()+4)    # yi[yi>0].min()/sqrt(n)
-    yi[yi==1] = 1-1.0/(c.min()+4)  # (1-yi[yi<1].max())/sqrt(n)
+    #yi[yi==0] = 1.0/(c[c!=0].min()+4)
+    #yi[yi==1] = 1-1.0/(c[c!=0].min()+4)
+    yi[yi==0] = fi[yi==0]  # yi[yi>0].min()/sqrt(n)
+    yi[yi==1] = 1-(1-yi[yi<1].max())/sqrt(n)
     logity = _logit(yi)
     # plt.plot(xi, np.log(yi/(1-yi)), xi, logity, '.')
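The functional change in this hunk: the raw per-bin success fraction c0/c used to have its exact 0s and 1s clamped with an ad hoc 1/(c.min()+4); now the 0s are filled from a pilot KRegression fit (fi) and the 1s are pulled just below 1 before the logit transform. A rough numpy-only sketch of that idea, where np.histogram and a caller-supplied pilot array stand in for wafo's gridcount and the KRegression pilot (names are illustrative, not the module's API):

    import numpy as np

    def binned_proportions(x, y, edges):
        c, _ = np.histogram(x, bins=edges)            # observations per bin
        c0, _ = np.histogram(x[y == 1], bins=edges)   # successes per bin
        p = np.where(c == 0, 0.0, c0 / np.maximum(c, 1))
        return c, p

    def fill_degenerate(p, pilot, n):
        p = p.copy()
        p[p == 0] = pilot[p == 0]                          # borrow pilot value where the fraction is exactly 0
        p[p == 1] = 1 - (1 - p[p < 1].max()) / np.sqrt(n)  # pull exact 1s slightly below 1
        return p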
@@ -3521,7 +3528,7 @@ def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj'):
     logity[logity==-40] = np.nan
     slogity = smoothn(logity, robust=True)
-    slogity2 = SavitzkyGolay(n=3, degree=2).smooth(logity)
+    slogity2 = SavitzkyGolay(n=2, degree=2).smooth(logity)
     sa1 = sqrt(evar(logity))
     sa = (slogity-logity).std()
     print('estd = %g %g' % (sa, sa1))
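For readers without wafo's sg_filter module, the same kind of local polynomial smoothing of the logit-transformed proportions can be sketched with scipy.signal.savgol_filter. This is a stand-in, not the class used here, and the mapping of SavitzkyGolay(n=2, degree=2) to a 5-point quadratic window assumes n is a half-window length:

    import numpy as np
    from scipy.signal import savgol_filter

    rng = np.random.default_rng(0)
    # synthetic noisy logit-scale series, purely illustrative
    logity = np.log(0.5 + 0.4*np.sin(np.linspace(0, 3, 41))) + 0.1*rng.standard_normal(41)
    # local quadratic fit over a 5-point window (window length must be odd)
    slogity2 = savgol_filter(logity, window_length=5, polyorder=2)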
@@ -3539,7 +3546,7 @@ def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj'):
     #return
     #print(logyi)
-    gkreg = KRegression(xi, logity, hs=hs/2, xmin=xmin-2*hopt, xmax=xmax+2*hopt)
+    gkreg = KRegression(xi, logity, hs=hs*0.4, xmin=xmin-2*hopt, xmax=xmax+2*hopt)
     fg = gkreg.eval_grid(xi, output='plotobj', title='Kernel regression', plotflag=1)
     sa = (fg.data-logity).std()
     sa2 = iqrange(fg.data-logity) / 1.349
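The two residual scales computed here are a plain standard deviation and a robust one: for a normal distribution the interquartile range is about 1.349 standard deviations, so iqrange(resid)/1.349 estimates sigma while staying insensitive to a few wild residuals. A numpy equivalent (illustrative, not the wafo iqrange helper):

    import numpy as np

    def robust_std(resid):
        # IQR of a Gaussian is ~1.349*sigma, so this matches np.std on clean
        # normal residuals but is much less sensitive to outliers.
        q75, q25 = np.percentile(resid, [75, 25])
        return (q75 - q25) / 1.349

    resid = np.random.default_rng(1).standard_normal(1000)
    print(robust_std(resid), resid.std())  # both close to 1.0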
@@ -3554,7 +3561,6 @@ def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj'):
     # return
     fg = gkreg.eval_grid(xiii, output='plotobj', title='Kernel regression', plotflag=1)
     dx = xi[1]-xi[0]
@@ -3570,6 +3576,9 @@ def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj'):
     fg.data = pi
+    #pi = f.data
     # ref Casella and Berger (1990) "Statistical inference" pp444
     a = 2*pi + z0**2/(ciii+1e-16)
     b = 2*(1+z0**2/(ciii+1e-16))
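The a and b terms here match the doubled form of the score ("Wilson") confidence interval for a binomial proportion that Casella and Berger derive around p. 444. The rest of the interval lies outside this hunk, so the sketch below reconstructs the full bounds under that assumption, with ciii playing the role of the per-point count n:

    import numpy as np
    from scipy import special

    def wilson_ci(p_hat, n, alpha=0.05):
        # Score ("Wilson") interval, written in the same doubled form as the
        # a/b terms above: bounds = (a -/+ c) / b.
        z0 = special.ndtri(1 - alpha / 2)   # standard normal quantile
        a = 2 * p_hat + z0**2 / n
        b = 2 * (1 + z0**2 / n)
        c = z0 * np.sqrt(z0**2 / n**2 + 4 * p_hat * (1 - p_hat) / n)
        return (a - c) / b, (a + c) / b

    print(wilson_ci(0.3, 50))   # roughly (0.19, 0.44)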
@@ -3590,9 +3599,6 @@ def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj'):
     plt.figure(2)
     fg.plot(label='KReg grid')
-    kreg = KRegression(x, y, hs=hs*0.5)
-    f = kreg(xiii, output='plotobj', title='Kernel regression n=%d, %s=%g' % (n, fun, hs), plotflag=1)
     f.plot(label='KRegression')
     labtxt = '%d CI' % (int(100*(1-alpha)))
     plt.plot(xi, syi, 'k', xi, syi2, 'k--', label='smoothn')
@@ -3605,7 +3611,8 @@ def kreg_demo2(n=100, hs=None, symmetric=False, fun='hisj'):
     print(kreg.tkde.tkde.hs)
     plt.legend()
     h = plt.gca()
-    #plt.setp(h,yscale='log')
+    if plotlog:
+        plt.setp(h, yscale='log')
     plt.show()

 def kde_gauss_demo(n=50):
@@ -3672,6 +3679,6 @@ if __name__ == '__main__':
     #kde_demo2()
     #kreg_demo1(fast=True)
     #kde_gauss_demo()
-    kreg_demo2(n=3800, symmetric=True, fun='hste')
+    kreg_demo2(n=180, symmetric=True, fun='hste', plotlog=False)
     #test_smoothn_2d()
     #test_smoothn_cardioid()