Simplified code further..

master
Per A Brodtkorb 8 years ago
parent dbb58b39e7
commit 9aa06ef075

@@ -154,9 +154,6 @@ class _KDE(object):
             The values evaluated at meshgrid(*args).
         """
-        if len(args) == 0:
-            args = self.get_args()
-        self.args = args
         return self.eval_grid_fun(self._eval_grid_fast, *args, **kwds)
 
     def _eval_grid_fast(self, *args, **kwds):
@@ -180,9 +177,6 @@ class _KDE(object):
             The values evaluated at meshgrid(*args).
         """
-        if len(args) == 0:
-            args = self.get_args()
-        self.args = args
         return self.eval_grid_fun(self._eval_grid, *args, **kwds)
 
     def _eval_grid(self, *args, **kwds):
@@ -212,6 +206,9 @@ class _KDE(object):
         return wdata
 
     def eval_grid_fun(self, eval_grd, *args, **kwds):
+        if len(args) == 0:
+            args = self.get_args()
+        self.args = args
         output = kwds.pop('output', 'value')
         f = eval_grd(*args, **kwds)
         if output == 'value':
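
Note: the two blocks deleted in the first two hunks were copies of the same default-argument handling; this hunk moves that logic into eval_grid_fun so it runs once for both grid evaluators. A minimal sketch of the resulting pattern (a simplified stand-in, not the actual wafo _KDE class):

    class GridEvaluator(object):  # hypothetical stand-in for _KDE
        def eval_grid_fast(self, *args, **kwds):
            return self.eval_grid_fun(self._eval_grid_fast, *args, **kwds)

        def eval_grid(self, *args, **kwds):
            return self.eval_grid_fun(self._eval_grid, *args, **kwds)

        def eval_grid_fun(self, eval_grd, *args, **kwds):
            if len(args) == 0:        # default-argument handling now lives only here
                args = self.get_args()
            self.args = args
            return eval_grd(*args, **kwds)
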
@@ -824,7 +821,7 @@ class KDE(_KDE):
         return self._loop_over_points(self.dataset, points, y, r)
 
 
-class KRegression(object):  # _KDE):
+class KRegression(object):
     """ Kernel-Regression
@@ -942,13 +939,29 @@ class BKRegression(object):
         Setting a=b=0.5 gives Jeffreys interval.
     '''
 
-    def __init__(self, *args, **kwds):
-        self.method = kwds.pop('method', 'beta')
-        self.a = max(kwds.pop('a', 0.5), _TINY)
-        self.b = max(kwds.pop('b', 0.5), _TINY)
-        self.kreg = KRegression(*args, **kwds)
+    def __init__(self, data, y, method='beta', a=0.05, b=0.05, p=0, hs_e=None,
+                 hs=None, kernel=None, alpha=0.0, xmin=None, xmax=None,
+                 inc=128, L2=None):
+        self.method = method
+        self.a = max(a, _TINY)
+        self.b = max(b, _TINY)
+        self.kreg = KRegression(data, y, p=p, hs=hs, kernel=kernel,
+                                alpha=alpha, xmin=xmin, xmax=xmax, inc=inc,
+                                L2=L2)
         # defines bin width (i.e. smoothing) in empirical estimate
-        self.hs_e = None
+        self.hs_e = hs_e
+
+    @property
+    def hs_e(self):
+        return self._hs_e
+
+    @hs_e.setter
+    def hs_e(self, hs_e):
+        if hs_e is None:
+            hs1 = self._get_max_smoothing('hste')[0]
+            hs2 = self._get_max_smoothing('hos')[0]
+            hs_e = sqrt(hs1 * hs2)
+        self._hs_e = hs_e
 
     def _set_smoothing(self, hs):
         self.kreg.tkde.hs = hs
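
Note: with the explicit __init__ signature, BKRegression no longer pulls its options out of **kwds, and hs_e becomes a property that falls back to the geometric mean of the 'hste' and 'hos' max-smoothing estimates when not supplied. A hedged usage sketch (the import path and the toy data are assumptions, not taken from this commit):

    import numpy as np
    from wafo.kdetools.kdetools import BKRegression  # assumed import path

    x = np.sort(np.random.rand(200) * 5)
    # binary outcomes with success probability increasing in x (made-up example data)
    y = (np.random.rand(200) < 1.0 / (1 + np.exp(-x + 2.5))).astype(float)

    bkreg = BKRegression(x, y, method='beta', a=0.05, b=0.05)
    print(bkreg.hs_e)  # derived via sqrt(hs1 * hs2) unless hs_e was passed in
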
@@ -981,10 +994,6 @@ class BKRegression(object):
     def get_grid(self, hs_e=None):
         if hs_e is None:
-            if self.hs_e is None:
-                hs1 = self._get_max_smoothing('hste')[0]
-                hs2 = self._get_max_smoothing('hos')[0]
-                self.hs_e = sqrt(hs1 * hs2)
             hs_e = self.hs_e
         x = self.x
         xmin, xmax = x.min(), x.max()
@@ -1007,8 +1016,7 @@ class BKRegression(object):
     def _credible_interval(self, n, p, alpha):
         # Jeffreys intervall a=b=0.5
         # st.beta.isf(alpha/2, y+a, n-y+b) y = n*p, n-y = n*(1-p)
-        a = self.a
-        b = self.b
+        a, b = self.a, self.b
         st = scipy.stats
         pup = np.where(p == 1, 1,
                        st.beta.isf(alpha / 2, n * p + a, n * (1 - p) + b))
@@ -1027,22 +1035,10 @@ class BKRegression(object):
             estimated probability of success in each trial
         alpha : scalar
             confidence level
-        method : {'beta', 'wilson'}
-            method is one of the following
-            'beta', return Bayesian Credible interval using beta-distribution.
-            'wilson', return Wilson score interval
-        a, b : scalars
-            parameters of the beta distribution defining the apriori
-            distribution of p, i.e.,
-            the Bayes estimator for p: p = (y+a)/(n+a+b).
-            Setting a=b=0.5 gives Jeffreys interval.
         """
         if self.method.startswith('w'):
-            plo, pup = self._wilson_score(n, p, alpha)
-        else:
-            plo, pup = self._credible_interval(n, p, alpha)
-        return plo, pup
+            return self._wilson_score(n, p, alpha)
+        return self._credible_interval(n, p, alpha)
 
     def prb_empirical(self, xi=None, hs_e=None, alpha=0.05, color='r', **kwds):
         """Returns empirical binomial probabiltity.
@@ -1155,11 +1151,9 @@ class BKRegression(object):
         """
         if prb_e is None:
-            prb_e = self.prb_empirical(
-                hs_e=self.hs_e, alpha=alpha, color=color)
+            prb_e = self.prb_empirical(alpha=alpha, color=color)
         if hsvec is None:
-            hsmax = self._get_max_smoothing(hsfun)[0]
-            hsmax = max(hsmax, self.hs_e)
+            hsmax = max(self._get_max_smoothing(hsfun)[0], self.hs_e)
             hsvec = np.linspace(hsmax * 0.2, hsmax, 55)
 
         hs_best = hsvec[-1] + 0.1
@@ -1371,18 +1365,22 @@ def kreg_demo1(hs=None, fast=False, fun='hisj'):
     if fast:
         kreg.__call__ = kreg.eval_grid_fast
-    f = kreg(output='plot', title='Kernel regression', plotflag=1)
+    f = kreg(x, output='plot', title='Kernel regression', plotflag=1)
     plt.figure(0)
     f.plot(label='p=0')
 
     kreg.p = 1
-    f1 = kreg(output='plot', title='Kernel regression', plotflag=1)
+    f1 = kreg(x, output='plot', title='Kernel regression', plotflag=1)
     f1.plot(label='p=1')
     # print(f1.data)
     plt.plot(x, y, '.', label='data')
     plt.plot(x, y0, 'k', label='True model')
-    plt.legend()
+    from statsmodels.nonparametric.kernel_regression import KernelReg
+    kreg2 = KernelReg(y, x, ('c'))
+    y2 = kreg2.fit(x)
+    plt.plot(x, y2[0], 'm', label='statsmodel')
+    plt.legend()
     plt.show()
 
     print(kreg.tkde.tkde._inv_hs)
@@ -1446,10 +1444,10 @@ def check_bkregression():
 
 if __name__ == '__main__':
-    if True:
+    if False:
         test_docstrings(__file__)
     else:
         # kde_demo5()
-        check_bkregression()
-        # kreg_demo1(fast=True)
+        # check_bkregression()
+        kreg_demo1(hs=0.04, fast=True)
         plt.show('hold')

@@ -215,11 +215,24 @@ class TestRegression(unittest.TestCase):
               0.0317357805015679, -0.0736187558312158, 0.04791463883941161,
               0.0660021138871709, -0.1049359954387588, 0.0034961490852392463]
         # print(ei.tolist())
-        y = 2*np.exp(-x**2/(2*0.3**2))+3*np.exp(-(x-1)**2/(2*0.7**2)) + ei
+        y0 = 2*np.exp(-x**2/(2*0.3**2))+3*np.exp(-(x-1)**2/(2*0.7**2))
+        y = y0 + ei
         kreg = wk.KRegression(x, y)
         f = kreg(output='plotobj', title='Kernel regression', plotflag=1)
+        kreg.p = 1
+        f1 = kreg(output='plot', title='Kernel regression', plotflag=1)
+        # import matplotlib.pyplot as plt
+        # plt.figure(0)
+        # f.plot(label='p=0')
+        # f1.plot(label='p=1')
+        # # print(f1.data)
+        # plt.plot(x, y, '.', label='data')
+        # plt.plot(x, y0, 'k', label='True model')
+        # plt.legend()
+        # plt.show('hold')
         assert_allclose(f.data[::5],
                         [3.14313544673463, 3.14582567119112, 3.149199078830904,
                          3.153335095194225, 3.15813722171621, 3.16302709623568,
@@ -231,6 +244,18 @@ class TestRegression(unittest.TestCase):
                          2.987516554300354, 2.9894470264681, 2.990311688080114,
                          2.9906144224522406, 2.9906534916935743])
+        print(f1.data[::5].tolist())
+        assert_allclose(f1.data[::5],
+                        [2.7832831899382, 2.83222307174095, 2.891112685251379,
+                         2.9588984473431, 3.03155510969298, 3.1012027219652127,
+                         3.1565263737763, 3.18517573180120, 3.177939796091202,
+                         3.13336188049535, 3.06057968378847, 2.978164236442354,
+                         2.9082732327128, 2.867790922237915, 2.861643209932334,
+                         2.88347067948676, 2.92123931823944, 2.96263190368498,
+                         2.9985444322015, 3.0243198029657, 3.038629147365635,
+                         3.04171702362464, 3.03475567689171, 3.020239732466334,
+                         3.002434232424511, 2.987257365211814])
 
     def test_BKRegression(self):
         # from wafo.kdetools.kdetools import _get_data
         # n = 51
