Misc. updates

master
Per.Andreas.Brodtkorb 14 years ago
parent bb7b1de823
commit 0b71a79872

@ -0,0 +1,49 @@
'''
Created on 20. jan. 2011

@author: pab
'''
import numpy as np
from numpy import exp
from wafo.misc import meshgrid

__all__ = ['peaks', 'humps']

def peaks(x=None, y=None, n=51):
    '''
    Return the well-known MATLAB(R) peaks function
    evaluated in the [-3, 3] x,y range.

    Example
    -------
    >>> import pylab as plt
    >>> x,y,z = peaks()
    >>> h = plt.contourf(x,y,z)
    '''
    if x is None:
        x = np.linspace(-3, 3, n)
    if y is None:
        y = np.linspace(-3, 3, n)
    [x1, y1] = meshgrid(x, y)
    z = (3 * (1 - x1) ** 2 * exp(-(x1 ** 2) - (y1 + 1) ** 2)
         - 10 * (x1 / 5 - x1 ** 3 - y1 ** 5) * exp(-x1 ** 2 - y1 ** 2)
         - 1. / 3 * exp(-(x1 + 1) ** 2 - y1 ** 2))
    return x1, y1, z

def humps(x=None):
    '''
    Computes a function that has three roots and some humps.
    '''
    if x is None:
        y = np.linspace(0, 1)
    else:
        y = np.asarray(x)
    return 1.0 / ((y - 0.3) ** 2 + 0.01) + 1.0 / ((y - 0.9) ** 2 + 0.04) + 2 * y - 5.2

if __name__ == '__main__':
    pass
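The humps docstring above only states that the function has three roots; below is a minimal, hedged sketch of locating them with scipy.optimize.brentq. The bracketing intervals are assumptions read off from the sign changes of the expression, not values taken from the toolbox.

# Illustration only: bracket the three sign changes of humps() and refine
# the roots with Brent's method.  The brackets are assumed, not part of wafo.
from scipy.optimize import brentq
from wafo.demos import humps

for a, b in [(-1.0, 0.0), (1.5, 2.0), (2.0, 2.5)]:
    root = brentq(humps, a, b)
    print('root near %g, humps(root) = %g' % (root, float(humps(root))))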

@ -32,7 +32,6 @@ from pylab import *
#! from commands used in Chapter 2
#!
pstate = 'off';
#! Section 2.1 Introduction and preliminary analysis
#!====================================================

@ -0,0 +1,110 @@
'''
Created on 20. jan. 2011

@author: pab
license BSD
'''
import warnings
import numpy as np
from wafo.plotbackend import plotbackend

__all__ = ['cltext']

def cltext(levels, percent=False, n=4, xs=0.036, ys=0.94, zs=0):
    '''
    Places contour level text in the current window.

    Parameters
    ----------
    levels : vector of contour levels, or the corresponding percent which the
        contour line encloses
    percent : False if levels are the actual contour levels (default),
        True if levels are the corresponding percent which the
        contour line encloses
    n : maximum number of digits of precision (default 4)

    Returns
    -------
    h : handles to the text objects.

    CLTEXT creates text objects in the current figure and prints
    "Level curves at:" if percent is False and
    "Level curves enclosing:" otherwise,
    followed by the contour levels or percent.

    NOTE:
    - The handles to the lines of text may also be found by
        h = findobj(gcf,'gid','CLTEXT','type','text');
        h = findobj(gca,'gid','CLTEXT','type','text');
    - To make the text objects follow the data in the axes, set the units
      for the text objects to 'data' by
        set(h,'unit','data')

    Examples
    --------
    >>> from wafo.demos import peaks
    >>> import pylab as plt
    >>> x,y,z = peaks();
    >>> h = plt.contour(x,y,z)
    >>> h = cltext(h.levels)
    >>> plt.show()

    data = rndray(1,2000,2); f = kdebin(data,{'kernel','epan','L2',.5,'inc',128});
    contour(f.x{:},f.f,f.cl), cltext(f.pl,1)

    See also
    --------
    pdfplot
    '''
    # TODO: Make it work like legend does (but without the box): include
    # position options etc.
    clevels = np.atleast_1d(levels)
    _CLTEXT_TAG = 'CLTEXT'
    cax = plotbackend.gca()
    axpos = cax.get_position()
    xint = axpos.intervalx
    yint = axpos.intervaly
    xss = xint[0] + xs * (xint[1] - xint[0])
    yss = yint[0] + ys * (yint[1] - yint[0])
    cf = plotbackend.gcf()  # get current figure

    # delete cltext objects if they already exist
    def matchfun(x):
        if hasattr(x, 'get_gid'):
            return x.get_gid() == _CLTEXT_TAG
        return False
    h_cltxts = plotbackend.findobj(cf, matchfun)
    if len(h_cltxts):
        for i in h_cltxts:
            try:
                cax.texts.remove(i)
            except:
                warnings.warn('Tried to delete a non-existing CL-text')
            try:
                cf.texts.remove(i)
            except:
                warnings.warn('Tried to delete a non-existing CL-text')

    charHeight = 1.0 / 33  # use a float to avoid integer division
    delta_y = charHeight
    if percent:
        titletxt = 'Level curves enclosing:'
    else:
        titletxt = 'Level curves at:'
    fmt = '%0.' + ('%d' % n) + 'g\n'
    cltxt = ''.join([fmt % level for level in clevels.tolist()])
    titleProp = dict(gid=_CLTEXT_TAG, horizontalalignment='left',
                     verticalalignment='center', fontweight='bold', axes=cax)
    ha1 = plotbackend.figtext(xss, yss, titletxt, **titleProp)
    yss -= delta_y
    txtProp = dict(gid=_CLTEXT_TAG, horizontalalignment='left',
                   verticalalignment='top', axes=cax)
    ha2 = plotbackend.figtext(xss, yss, cltxt, **txtProp)
    return ha1, ha2

if __name__ == '__main__':
    pass
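As a complement to the doctest in the docstring, here is a small sketch of the percent variant; the percentage values passed to cltext are illustrative assumptions rather than numbers computed from the contour set.

# Illustration only: label a contour plot with the percentages each level is
# taken to enclose ("Level curves enclosing:").  The percent values are assumed.
import pylab as plt
from wafo.demos import peaks
from wafo.graphutil import cltext

x, y, z = peaks()
h = plt.contour(x, y, z)
ha1, ha2 = cltext([10, 30, 50, 70, 90], percent=True)
plt.show()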

@ -2,60 +2,20 @@ from __future__ import division
import warnings
import copy
import numpy as np
-from numpy import pi, sqrt, ones, zeros, exp #@UnresolvedImport
+from numpy import pi, sqrt, ones, zeros #@UnresolvedImport
from scipy import integrate as intg
import scipy.special.orthogonal as ort
from scipy import special as sp
import pylab as plb
-from wafo.misc import meshgrid
+from scipy.integrate import simps, trapz
+from wafo.misc import is_numlike
+from wafo.demos import humps
_POINTS_AND_WEIGHTS = {}
-__all__ = ['peaks', 'humps', 'is_numlike', 'dea3', 'clencurt', 'romberg',
+__all__ = ['dea3', 'clencurt', 'romberg',
           'h_roots','j_roots', 'la_roots','p_roots','qrule',
           'gaussq', 'richardson', 'quadgr', 'qdemo']
-def peaks(x=None, y=None, n=51):
-    '''
-    Return the "well" known MatLab (R) peaks function
-    evaluated in the [-3,3] x,y range
-
-    Example
-    -------
-    >>> import pylab as plt
-    >>> x,y,z = peaks()
-    >>> h = plt.contourf(x,y,z)
-    '''
-    if x is None:
-        x = np.linspace(-3, 3, n)
-    if y is None:
-        y = np.linspace(-3, 3, n)
-    [x1, y1] = meshgrid(x, y)
-    z = (3 * (1 - x1) ** 2 * exp(-(x1 ** 2) - (y1 + 1) ** 2)
-         - 10 * (x1 / 5 - x1 ** 3 - y1 ** 5) * exp(-x1 ** 2 - y1 ** 2)
-         - 1. / 3 * exp(-(x1 + 1) ** 2 - y1 ** 2))
-    return x1, y1, z
-def humps(x=None):
-    '''
-    Computes a function that has three roots, and some humps.
-    '''
-    if x is None:
-        y = np.linspace(0, 1)
-    else:
-        y = np.asarray(x)
-    return 1.0 / ((y - 0.3) ** 2 + 0.01) + 1.0 / ((y - 0.9) ** 2 + 0.04) + 2 * y - 5.2
-def is_numlike(obj):
-    'return true if *obj* looks like a number'
-    try:
-        obj + 1
-    except TypeError: return False
-    else: return True

def dea3(v0, v1, v2):
    '''
@ -530,7 +490,6 @@ def j_roots(n, alpha, beta, method='newton'):
                a = 2. * j * (j + alfbet) * tmp
                tmp = tmp + 2
                c = 2 * (j - 1 + alpha) * (j - 1 + beta) * tmp
                b = (tmp - 1) * (alpha ** 2 - beta ** 2 + tmp * (tmp - 2) * z)
                L[kp1, :] = (b * L[k0, :] - c * L[km1, :]) / a

@ -23,7 +23,7 @@ except:
floatinfo = finfo(float)
-__all__ = ['JITImport', 'DotDict', 'Bunch', 'printf', 'sub_dict_select',
+__all__ = ['is_numlike', 'JITImport', 'DotDict', 'Bunch', 'printf', 'sub_dict_select',
           'parse_kwargs', 'detrendma', 'ecross', 'findcross',
           'findextrema', 'findpeaks', 'findrfc', 'rfcfilter', 'findtp', 'findtc',
           'findoutliers', 'common_shape', 'argsreduce',
@ -31,6 +31,13 @@ __all__ = ['JITImport', 'DotDict', 'Bunch', 'printf', 'sub_dict_select',
           'discretize', 'discretize2', 'pol2cart', 'cart2pol', 'meshgrid', 'ndgrid',
           'trangood', 'tranproc', 'histgrm', 'num2pistr']

+def is_numlike(obj):
+    'return true if *obj* looks like a number'
+    try:
+        obj + 1
+    except TypeError: return False
+    else: return True

class JITImport(object):
    '''
    Just In Time Import of module

@ -28,7 +28,7 @@ from numpy.fft import fft
from numpy.random import randn
from scipy.integrate import trapz
from pylab import stineman_interp
-from matplotlib.mlab import psd
+from matplotlib.mlab import psd, detrend_mean
import scipy.signal
@ -40,6 +40,9 @@ from wafodata import WafoData
from plotbackend import plotbackend
import matplotlib
from scipy.stats.stats import skew, kurtosis
+from scipy.signal.windows import parzen
+from scipy import special
matplotlib.interactive(True)
_wafocov = JITImport('wafo.covariance')
_wafospec = JITImport('wafo.spectrum')
@ -47,6 +50,8 @@ _wafospec = JITImport('wafo.spectrum')
__all__ = ['TimeSeries', 'LevelCrossings', 'CyclePairs', 'TurningPoints',
           'sensortypeid', 'sensortype']

+def _invchi2(q, df):
+    return special.chdtri(df, q)
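The helper wraps scipy.special.chdtri, the inverse of the upper-tail chi-square probability, which is what the confidence-interval constants in the dat2spec port further down need. A small sketch of those factors follows; the degrees of freedom and confidence level are assumed example values.

# Illustration only: multiplicative chi^2 confidence factors for a spectral
# estimate with v degrees of freedom (v and alpha are assumed example values).
from scipy import special

def _invchi2(q, df):
    return special.chdtri(df, q)   # x such that P(chi2_df > x) = q

v, alpha = 30, 0.05
CI = [v / _invchi2(1 - alpha / 2, v), v / _invchi2(alpha / 2, v)]
# CI is roughly [1.79, 0.64]: multiply the estimated spectrum by these factors
# to get approximate upper and lower 95% confidence bounds.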
class LevelCrossings(WafoData):
    '''
@ -856,7 +861,7 @@ class TimeSeries(WafoData):
        acf.norm = norm
        return acf

-    def tospecdata(self, *args, **kwargs):
+    def tospecdata(self, L=None, tr=None, method='cov', detrend=detrend_mean, window=parzen, noverlap=0, pad_to=None):
        """
        Return power spectral density by Welch's averaged periodogram method.
@ -866,9 +871,6 @@ class TimeSeries(WafoData):
            if len(data) < NFFT, it will be zero padded to `NFFT`
            before estimation. Must be even; a power 2 is most efficient.
        detrend : function
-        Fs : real, scalar
-            sampling frequency (samples per time unit).
        window : vector of length NFFT or function
            To create window vectors see numpy.blackman, numpy.hamming,
            numpy.bartlett, scipy.signal, scipy.signal.get_window etc.
@ -893,11 +895,180 @@ class TimeSeries(WafoData):
        Bendat & Piersol (1986) Random Data: Analysis and Measurement
        Procedures, John Wiley & Sons
        """
-        fs = 1. / (2 * self.sampling_period())
-        S, f = psd(self.data.ravel(), Fs=fs, *args, **kwargs)
+        dt = self.sampling_period()
+        #fs = 1. / (2 * dt)
+        yy = self.data.ravel() if tr is None else tr.dat2gauss(self.data.ravel())
+        yy = detrend(yy) if hasattr(detrend, '__call__') else yy
+        S, f = psd(yy, Fs=1. / dt, NFFT=nfft, detrend=detrend, window=window,
+                   noverlap=noverlap, pad_to=pad_to, scale_by_freq=True)
        fact = 2 * 2.0 * pi
        w = fact * f
        return _wafospec.SpecData1D(S / fact, w)
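The new defaults pull in matplotlib's mean detrending and SciPy's Parzen window. A quick sketch of what the two callables do (the input array is an arbitrary example):

# Illustration only: behaviour of the new default detrend/window arguments.
import numpy as np
from matplotlib.mlab import detrend_mean
from scipy.signal.windows import parzen

x = np.array([1.0, 2.0, 3.0, 4.0])
print(detrend_mean(x))   # [-1.5 -0.5  0.5  1.5] -- the sample mean is removed
print(parzen(5))         # 5-point Parzen taper, symmetric and peaking at 1.0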
    def specdata(self, L=None, tr=None, method='cov', dflag='mean', ftype='w'):
        '''
        Estimate one-sided spectral density from data.

        Parameters
        ----------
        L : scalar integer
            maximum lag size of the window function. As L decreases the
            estimate becomes smoother and Bw increases. If we want to resolve
            peaks in S which are Bf (Hz or rad/sec) apart then Bw < Bf. If no
            value is given the lag size is set to be the lag where the auto
            correlation is less than 2 standard deviations (maximum 300).
        tr : transformation object
            the transformation assuming that x is a sample of a transformed
            Gaussian process. If g is None then x is a sample of a Gaussian
            process (default).
        method : string
            defining the estimation method. Options are
            'cov' : Frequency smoothing using a Parzen window function
                    on the estimated autocovariance function. (default)
            'psd' : Welch's averaged periodogram method with no overlapping
                    batches.
        dflag : string
            defining detrending performed on the signal before estimation.
            'mean', 'linear' or 'ma' (= moving average) (default 'mean')
        ftype : character
            defining frequency type: 'w' or 'f' (default 'w')

        Returns
        -------
        spec : SpecData1D object

        Example
        -------
        x = load('sea.dat');
        S = dat2spec(x);
        specplot(S)

        See also
        --------
        dat2tr, dat2cov

        References
        ----------
        Georg Lindgren and Holger Rootzen (1986)
        "Stationära stokastiska processer", pp 173--176.

        Gareth Janacek and Louise Swift (1993)
        "TIME SERIES forecasting, simulation, applications",
        pp 75--76 and 261--268

        Emanuel Parzen (1962),
        "Stochastic Processes", HOLDEN-DAY,
        pp 66--103
        '''
        # TODO: partial port of the Matlab dat2spec routine; see the
        # commented-out Matlab fragments below.

        # Initialize constants
        nugget = 0  # 10 ** -12
        rate = 2    # interpolation rate for frequency
        tapery = 0  # taper the data before the analysis
        wdef = 1    # 1 = parzen window, 2 = hanning window, 3 = bartlett window

        dt = self.sampling_period()
        yy = self.data if tr is None else tr.dat2gauss(self.data)
        n = len(yy)
        if L is not None:
            L = min(L, n)

        dflag = dflag.lower()
        if dflag == 'mean':
            yy -= yy.mean()
        elif dflag == 'linear':
            yy = detrend(yy, 1)  # signal toolbox detrend
        elif dflag == 'ma':
            dL = np.ceil(1200 / 2 / dt)  # approximately 20 min. moving average
            yy = detrendma(yy, dL)
            dflag = 'mean'

        max_L = min(300, n)  # maximum lag if L is undetermined
        change_L = L is None
        if change_L:
            L = min(n - 2, int(4. / 3 * max_L + 0.5))

        if method == 'cov' or change_L:
            R = self.tocovdata()
            if change_L:
                # finding where the ACF is less than 2 standard deviations
                L = max_L - (np.abs(R.data[max_L::-1]) > 2 * R.stdev[max_L::-1]).argmax()  # a better L value
        if wdef == 1:  # modify L so that hanning and Parzen give approx. the same result
            L = min(int(4 * L / 3), n - 2)
            print('The default L is set to %d' % L)
        try:
            # window may be a callable returning a taper of a given length,
            # or a precomputed taper vector
            win = window(2 * L - 1)
            wname = window.__name__
        except:
            wname = None
            win = window
        v = None
        Be = None
        nf = rate * 2 ** nextpow2(2 * L - 2)  # Interpolate the spectrum with rate
        nfft = 2 * nf
        # S = createspec('freq',ftype);
        # S.tr = g;
        # S.note = ['dat2spec(',inputname(1),'), Method = ' method ];
        # S.norm = 0; % not normalized
        # S.L = L;
        # S.S = zeros(nf+1,m-1);
        if method == 'psd':
            S, f = psd(yy, Fs=1. / dt, NFFT=nfft, detrend=detrend, window=window,
                       noverlap=noverlap, pad_to=pad_to, scale_by_freq=True)
        else:  # cov method
            # add a nugget effect to ensure that round off errors
            # do not result in negative spectral estimates
            R.data[:L] = R.data[:L] * win[L::]
            R.data[L:] = 0
            spec = R.tospecdata(rate=2, nugget=nugget)
            # Not yet ported from the Matlab dat2spec code (needs p, v and Rper):
            # if ( ~isempty(p) ),
            #     alpha = (1-p);
            #     % Confidence interval constants
            #     CI = [v/_invchi2( 1-alpha/2 ,v), v/_invchi2( alpha/2 ,v)];
            # ind = find(Rper<0);
            # if any(ind)
            #     Rper(ind) = 0; % set negative values to zero
            #     warning('WAFO:DAT2SPEC','negative spectral estimates')
            # end

        if wname == 'parzen':
            v = int(3.71 * n / L)          # degrees of freedom used in chi^2 distribution
            Be = 2 * pi * 1.33 / (L * dt)  # bandwidth (rad/sec)
        elif wname == 'hanning':
            v = int(2.67 * n / L)          # degrees of freedom used in chi^2 distribution
            Be = 2 * pi / (L * dt)         # bandwidth (rad/sec)
        elif wname == 'bartlett':
            v = int(3 * n / L)             # degrees of freedom used in chi^2 distribution
            Be = 2 * pi * 1.33 / (L * dt)  # bandwidth (rad/sec)

        # Not yet ported from the Matlab dat2spec code (needs Rper):
        # if ftype=='w'
        #     S.w = (0:nf)'/nf*pi/dT;               % (rad/s)
        #     S.S = real(Rper(1:(nf+1),1))*dT/pi;   % (m^2*s/rad) one sided spectrum
        #     S.Bw = Be;
        # else % ftype == f
        #     S.f = (0:nf)'/nf/2/dT;                % frequency Hz if dT is in seconds
        #     S.S = 2*real(Rper(1:(nf+1),1))*dT;    % (m^2*s) one sided spectrum
        #     S.Bw = Be/(2*pi);                     % bandwidth in Hz
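The 'cov' branch above is the Blackman-Tukey idea described in the docstring: taper the sample autocovariance with a Parzen lag window and transform it to a smoothed one-sided spectrum. Here is a self-contained sketch of that idea, independent of the unfinished method body; the signal, lag cutoff and frequency grid are assumptions.

# Illustration only: Blackman-Tukey estimate
# S(w) = dt/pi * (r_0 + 2 * sum_k win_k r_k cos(w k dt))
import numpy as np
from scipy.signal.windows import parzen

def bt_spectrum(x, L=100, dt=1.0, nf=257):
    '''One-sided spectral estimate over angular frequency, Parzen lag window.'''
    x = np.asarray(x, dtype=float) - np.mean(x)
    n = len(x)
    r = np.array([np.dot(x[:n - k], x[k:]) / n for k in range(L)])  # sample autocovariance
    win = parzen(2 * L - 1)[L - 1:]                                 # lag window, win[0] == 1
    w = np.linspace(0, np.pi / dt, nf)                              # angular frequencies (rad/s)
    k = np.arange(1, L)
    S = (dt / np.pi) * (r[0] + 2 * np.dot(r[1:] * win[1:], np.cos(np.outer(k * dt, w))))
    return w, S

# e.g. w, S = bt_spectrum(np.random.randn(5000), L=150, dt=0.25)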
    def trdata(self, method='nonlinear', **options):
        '''
        Estimate transformation, g, from data.

@ -0,0 +1,16 @@
'''
python setup.py build_src build_ext --inplace

See also http://www.scipy.org/Cookbook/CompilingExtensionsOnWindowsWithMinGW
'''
# File setup.py
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('', parent_package, top_path)
    config.add_extension('c_library',
                         sources=['c_library.pyf', 'c_functions.c'])
    return config

if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())

@ -0,0 +1,30 @@
'''
python setup.py build_src build_ext --inplace

See also http://www.scipy.org/Cookbook/CompilingExtensionsOnWindowsWithMinGW
'''
# File setup.py
def compile_all():
    import os
    files = ['mvnprd', 'mvnprodcorrprb']
    compile1_format = 'gfortran -fPIC -c %s.f'
    for file in files:
        os.system(compile1_format % file)
    file_objects = ['%s.o' % file for file in files]
    return file_objects

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    libs = compile_all()
    config = Configuration('', parent_package, top_path)
    config.add_extension('mvnprdmod',
                         libraries=libs,
                         sources=['mvnprd_interface.f'])
    return config

if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())

@ -5,6 +5,7 @@ Spectrum package in WAFO Toolbox.
""" """
from core import * #SpecData1D, SpecData2D, cltext from core import *
#SpecData1D, SpecData2D, cltext
import models import models
import dispersion_relation import dispersion_relation

@ -19,8 +19,8 @@ from pylab import stineman_interp
from dispersion_relation import w2k #, k2w
from wafo.wafodata import WafoData, now
from wafo.misc import sub_dict_select, nextpow2, discretize, JITImport, findpeaks #, tranproc
+from wafo.graphutil import cltext
try:
    from wafo.gaussian import Rind
except ImportError:
@ -41,7 +41,7 @@ from wafo.plotbackend import plotbackend
_WAFOCOV = JITImport('wafo.covariance')
-__all__ = ['SpecData1D', 'SpecData2D', 'cltext', 'plotspec']
+__all__ = ['SpecData1D', 'SpecData2D', 'plotspec']

def _set_seed(iseed):
    '''Set seed of random generator'''
@ -235,7 +235,7 @@ def plotspec(specdata, linetype='b-', flag=1):
        ylbl4_txt = 'Wave Number Spectrum';
    else:
        raise ValueError('Frequency type unknown')

    if hasattr(specdata, 'norm') and specdata.norm :
@ -607,101 +607,6 @@ def plotspec(specdata, linetype='b-', flag=1):
#
-def cltext(levels, percent=False, n=4, xs=0.036, ys=0.94, zs=0):
-    '''
-    Places contour level text in the current window
-
-    Parameters
-    ----------
-    levels = vector of contour levels or the corresponding percent which the
-             contour line encloses
-    percent = 0 if levels are the actual contour levels (default)
-              1 if levels are the corresponding percent which the
-              contour line encloses
-    n = maximum N digits of precision (default 4)
-
-    Returns
-    h = handles to the text objects.
-
-    CLTEXT creates text objects in the current figure and prints
-          "Level curves at:" if percent is False and
-          "Level curves enclosing:" otherwise
-    and the contour levels or percent.
-
-    NOTE:
-    -The handles to the lines of text may also be found by
-        h = findobj(gcf,'gid','CLTEXT','type','text');
-        h = findobj(gca,'gid','CLTEXT','type','text');
-    -To make the text objects follow the data in the axes set the units
-     for the text objects 'data' by
-        set(h,'unit','data')
-
-    Examples:
-    >>> from wafo.integrate import peaks
-    >>> import pylab as plt
-    >>> x,y,z = peaks();
-    >>> h = plt.contour(x,y,z)
-    >>> h = cltext(h.levels)
-    >>> plt.show()
-
-    data = rndray(1,2000,2); f = kdebin(data,{'kernel','epan','L2',.5,'inc',128});
-    contour(f.x{:},f.f,f.cl),cltext(f.pl,1)
-
-    See also
-    pdfplot
-    '''
-    # TODO : Make it work like legend does (but without the box): include position options etc...
-    clevels = np.atleast_1d(levels)
-    _CLTEXT_TAG = 'CLTEXT'
-    cax = plotbackend.gca()
-    axpos = cax.get_position()
-    xint = axpos.intervalx
-    yint = axpos.intervaly
-    xss = xint[0] + xs * (xint[1] - xint[0])
-    yss = yint[0] + ys * (yint[1] - yint[0])
-    cf = plotbackend.gcf() # get current figure
-    #% delete cltext object if it exists
-    #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-    def matchfun(x):
-        if hasattr(x, 'get_gid'):
-            return x.get_gid() == _CLTEXT_TAG
-        return False
-    h_cltxts = plotbackend.findobj(cf, matchfun);
-    if len(h_cltxts):
-        for i in h_cltxts:
-            try:
-                cax.texts.remove(i)
-            except:
-                warnings.warn('Tried to delete a non-existing CL-text')
-            try:
-                cf.texts.remove(i)
-            except:
-                warnings.warn('Tried to delete a non-existing CL-text')
-    charHeight = 1 / 33;
-    delta_y = charHeight;
-    if percent:
-        titletxt = 'Level curves enclosing:';
-    else:
-        titletxt = 'Level curves at:';
-    format = '%0.' + ('%d' % n) + 'g\n'
-    cltxt = ''.join([format % level for level in clevels.tolist()])
-    titleProp = dict(gid=_CLTEXT_TAG, horizontalalignment='left',
-                     verticalalignment='center', fontweight='bold', axes=cax)
-    ha1 = plotbackend.figtext(xss, yss, titletxt, **titleProp)
-    yss -= delta_y;
-    txtProp = dict(gid=_CLTEXT_TAG, horizontalalignment='left',
-                   verticalalignment='top', axes=cax)
-    ha2 = plotbackend.figtext(xss, yss, cltxt, **txtProp)
-    return ha1, ha2

class SpecData1D(WafoData):
@ -754,8 +659,7 @@ class SpecData1D(WafoData):
        self.phi = 0.0
        self.v = 0.0
        self.norm = False
-        somekeys = ['angletype', 'phi', 'name', 'h', 'tr', 'freqtype', 'v',
-                    'type', 'norm']
+        somekeys = ['phi', 'name', 'h', 'tr', 'freqtype', 'v', 'type', 'norm']
        self.__dict__.update(sub_dict_select(kwds, somekeys))

@ -0,0 +1,32 @@
'''
Created on 19. juli 2010

@author: pab
'''
from wafo.spectrum.dispersion_relation import w2k, k2w

def test_k2w():
    '''
    >>> from numpy import arange
    >>> k2w(arange(0.01,.5,0.2))[0]
    array([ 0.3132092 , 1.43530485, 2.00551739])
    >>> k2w(arange(0.01,.5,0.2),h=20)[0]
    array([ 0.13914927, 1.43498213, 2.00551724])
    '''

def test_w2k():
    '''
    >>> w2k(range(4))[0]
    array([ 0. , 0.1019368 , 0.4077472 , 0.91743119])
    >>> w2k(range(4),h=20)[0]
    array([ 0. , 0.10503601, 0.40774726, 0.91743119])
    '''

def main():
    import doctest
    doctest.testmod()

if __name__ == '__main__':
    main()

@ -0,0 +1,68 @@
import numpy as np
from wafo.spectrum.models import (Bretschneider, Jonswap, OchiHubble, Tmaspec,
                                  Torsethaugen, McCormick, Wallop)

def test_bretschneider():
    '''
    >>> S = Bretschneider(Hm0=6.5,Tp=10)
    >>> S((0,1,2,3))
    array([ 0. , 1.69350993, 0.06352698, 0.00844783])
    '''

def test_jonswap():
    '''
    >>> S = Jonswap(Hm0=7, Tp=11,gamma=1)
    >>> S((0,1,2,3))
    array([ 0. , 1.42694133, 0.05051648, 0.00669692])

    >>> w = np.linspace(0,5)
    >>> S2 = Bretschneider(Hm0=7, Tp=11)

    JONSWAP with gamma=1 should be equal to Bretschneider:
    >>> np.all(np.abs(S(w)-S2(w))<1.e-7)
    True
    '''

def test_tmaspec():
    '''
    >>> S = Tmaspec(Hm0=7, Tp=11,gamma=1,h=10)
    >>> S((0,1,2,3))
    array([ 0. , 0.70106233, 0.05022433, 0.00669692])
    '''

def test_torsethaugen():
    '''
    >>> S = Torsethaugen(Hm0=7, Tp=11,gamma=1,h=10)
    >>> S((0,1,2,3))
    array([ 0. , 1.19989709, 0.05819794, 0.0093541 ])

    >>> S.wind(range(4))
    array([ 0. , 1.13560528, 0.05529849, 0.00888989])
    >>> S.swell(range(4))
    array([ 0. , 0.0642918 , 0.00289946, 0.00046421])
    '''

def test_ochihubble():
    '''
    >>> S = OchiHubble(par=2)
    >>> S(range(4))
    array([ 0. , 0.90155636, 0.04185445, 0.00583207])
    '''

def test_mccormick():
    '''
    >>> S = McCormick(Hm0=6.5,Tp=10)
    >>> S(range(4))
    array([ 0. , 1.87865908, 0.15050447, 0.02994663])
    '''

def test_wallop():
    '''
    >>> S = Wallop(Hm0=6.5, Tp=10)
    >>> S(range(4))
    array([ 0.00000000e+00, 9.36921871e-01, 2.76991078e-03,
            7.72996150e-05])
    '''

def main():
    import doctest
    doctest.testmod()

if __name__ == '__main__':
    main()

@ -0,0 +1,200 @@
import wafo.spectrum.models
from wafo.spectrum import SpecData1D

def test_tocovmatrix():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap()
    >>> S = Sj.tospecdata()
    >>> acfmat = S.tocov_matrix(nr=3, nt=256, dt=0.1)
    >>> acfmat[:2,:]
    array([[ 3.06075987, 0. , -1.67750289, 0. ],
           [ 3.05246132, -0.16662376, -1.66819445, 0.18634189]])
    '''

def test_tocovdata():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap()
    >>> S = Sj.tospecdata()
    >>> Nt = len(S.data)-1
    >>> acf = S.tocovdata(nr=0, nt=Nt)
    >>> acf.data[:5]
    array([ 3.06093287, 2.23846752, 0.48630084, -1.1336035 , -2.03036854])
    '''

def test_to_t_pdf():
    '''
    The density of Tc is computed by:
    >>> from wafo.spectrum import models as sm
    >>> Sj = sm.Jonswap()
    >>> S = Sj.tospecdata()
    >>> f = S.to_t_pdf(pdef='Tc', paramt=(0, 10, 51), speed=7, seed=100)
    >>> ['%2.3f' % val for val in f.data[:10]]
    ['0.000', '0.014', '0.027', '0.040', '0.050', '0.059', '0.067', '0.072', '0.077', '0.081']

    estimated error bounds
    >>> ['%2.4f' % val for val in f.err[:10]]
    ['0.0000', '0.0003', '0.0003', '0.0004', '0.0006', '0.0009', '0.0016', '0.0019', '0.0020', '0.0021']
    '''

def test_sim():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap();S = Sj.tospecdata()
    >>> ns =100; dt = .2
    >>> x1 = S.sim(ns,dt=dt)

    >>> import numpy as np
    >>> import scipy.stats as st
    >>> x2 = S.sim(20000,20)
    >>> truth1 = [0,np.sqrt(S.moment(1)[0]),0., 0.]
    >>> funs = [np.mean,np.std,st.skew,st.kurtosis]
    >>> for fun,trueval in zip(funs,truth1):
    ...     res = fun(x2[:,1::],axis=0)
    ...     m = res.mean()
    ...     sa = res.std()
    ...     #trueval, m, sa
    ...     np.abs(m-trueval)<sa
    True
    array([ True], dtype=bool)
    True
    True
    '''

def test_sim_nl():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap();S = Sj.tospecdata()
    >>> ns =100; dt = .2
    >>> x1 = S.sim_nl(ns,dt=dt)

    >>> import numpy as np
    >>> import scipy.stats as st
    >>> x2, x1 = S.sim_nl(ns=20000,cases=40)
    >>> truth1 = [0,np.sqrt(S.moment(1)[0][0])] + S.stats_nl(moments='sk')
    >>> truth1[-1] = truth1[-1]-3
    >>> truth1
    [0, 1.7495200310090633, 0.18673120577479801, 0.061988521262417606]

    >>> funs = [np.mean,np.std,st.skew,st.kurtosis]
    >>> for fun,trueval in zip(funs,truth1):
    ...     res = fun(x2[:,1::], axis=0)
    ...     m = res.mean()
    ...     sa = res.std()
    ...     #trueval, m, sa
    ...     np.abs(m-trueval)<2*sa
    True
    True
    True
    True
    '''
def test_stats_nl():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Hs = 7.
    >>> Sj = sm.Jonswap(Hm0=Hs, Tp=11)
    >>> S = Sj.tospecdata()
    >>> me, va, sk, ku = S.stats_nl(moments='mvsk')
    >>> me; va; sk; ku
    0.0
    3.0608203389019537
    0.18673120577479801
    3.0619885212624176
    '''

def test_testgaussian():
    '''
    >>> import wafo.spectrum.models as sm
    >>> import wafo.transform.models as wtm
    >>> import wafo.objects as wo
    >>> Hs = 7
    >>> Sj = sm.Jonswap(Hm0=Hs)
    >>> S0 = Sj.tospecdata()
    >>> ns =100; dt = .2
    >>> x1 = S0.sim(ns, dt=dt)

    >>> S = S0.copy()
    >>> me, va, sk, ku = S.stats_nl(moments='mvsk')
    >>> S.tr = wtm.TrHermite(mean=me, sigma=Hs/4, skew=sk, kurt=ku, ysigma=Hs/4)
    >>> ys = wo.mat2timeseries(S.sim(ns=2**13))
    >>> g0, gemp = ys.trdata()
    >>> t0 = g0.dist2gauss()
    >>> t1 = S0.testgaussian(ns=2**13, t0=t0, cases=50)
    >>> sum(t1>t0)<5
    True
    '''

def test_moment():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap(Hm0=5)
    >>> S = Sj.tospecdata() #Make spectrum ob
    >>> S.moment()
    ([1.5614600345079888, 0.95567089481941048], ['m0', 'm0tt'])
    '''

def test_nyquist_freq():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap(Hm0=5)
    >>> S = Sj.tospecdata() #Make spectrum ob
    >>> S.nyquist_freq()
    3.0
    '''

def test_sampling_period():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap(Hm0=5)
    >>> S = Sj.tospecdata() #Make spectrum ob
    >>> S.sampling_period()
    1.0471975511965976
    '''

def test_normalize():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap(Hm0=5)
    >>> S = Sj.tospecdata() #Make spectrum ob
    >>> S.moment(2)
    ([1.5614600345079888, 0.95567089481941048], ['m0', 'm0tt'])

    >>> Sn = S.copy(); Sn.normalize()

    Now the moments should be one
    >>> Sn.moment(2)
    ([1.0000000000000004, 0.99999999999999967], ['m0', 'm0tt'])
    '''

def test_characteristic():
    '''
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap(Hm0=5)
    >>> S = Sj.tospecdata() #Make spectrum ob
    >>> S.characteristic(1)
    (array([ 8.59007646]), array([[ 0.03040216]]), ['Tm01'])

    >>> [ch, R, txt] = S.characteristic([1,2,3]) # fact a vector of integers
    >>> ch; R; txt
    array([ 8.59007646, 8.03139757, 5.62484314])
    array([[ 0.03040216, 0.02834263, NaN],
           [ 0.02834263, 0.0274645 , NaN],
           [ NaN, NaN, 0.01500249]])
    ['Tm01', 'Tm02', 'Tm24']

    >>> S.characteristic('Ss') # fact a string
    (array([ 0.04963112]), array([[ 2.63624782e-06]]), ['Ss'])

    >>> S.characteristic(['Hm0','Tm02']) # fact a list of strings
    (array([ 4.99833578, 8.03139757]), array([[ 0.05292989, 0.02511371],
           [ 0.02511371, 0.0274645 ]]), ['Hm0', 'Tm02'])
    '''

def test_bandwidth():
    '''
    >>> import numpy as np
    >>> import wafo.spectrum.models as sm
    >>> Sj = sm.Jonswap(Hm0=3)
    >>> w = np.linspace(0,4,256)
    >>> S = SpecData1D(Sj(w),w) #Make spectrum object from numerical values
    >>> S.bandwidth([0,1,2,3])
    array([ 0.65354446, 0.3975428 , 0.75688813, 2.00207912])
    '''

def main():
    import doctest
    doctest.testmod()

if __name__ == '__main__':
    main()

@ -0,0 +1,59 @@
# -*- coding:utf-8 -*-
"""
Created on 5. aug. 2010

@author: pab
"""
import wafo.data
import numpy as np

def test_timeseries():
    '''
    >>> import wafo.data
    >>> import wafo.objects as wo
    >>> x = wafo.data.sea()
    >>> ts = wo.mat2timeseries(x)
    >>> ts.sampling_period()
    0.25

    Estimate spectrum
    >>> S = ts.tospecdata()
    >>> S.data[:10]
    array([ 0.01350817, 0.0050932 , 0.00182003, 0.00534806, 0.049407 ,
            0.25144845, 0.28264622, 0.21398405, 0.18563258, 0.25878918])

    Estimated covariance function
    >>> rf = ts.tocovdata(lag=150)
    >>> rf.data[:10]
    array([ 0.22368637, 0.20838473, 0.17110733, 0.12237803, 0.07024054,
            0.02064859, -0.02218831, -0.0555993 , -0.07859847, -0.09166187])
    '''

def test_timeseries_trdata():
    '''
    >>> import wafo.spectrum.models as sm
    >>> import wafo.transform.models as tm
    >>> from wafo.objects import mat2timeseries
    >>> Hs = 7.0
    >>> Sj = sm.Jonswap(Hm0=Hs)
    >>> S = Sj.tospecdata()   #Make spectrum object from numerical values
    >>> S.tr = tm.TrOchi(mean=0, skew=0.16, kurt=0, sigma=Hs/4, ysigma=Hs/4)
    >>> xs = S.sim(ns=2**16)
    >>> ts = mat2timeseries(xs)
    >>> g0, gemp = ts.trdata(monitor=True) # Monitor the development
    >>> g1, gemp = ts.trdata(method='m', gvar=0.5 ) # Equal weight on all points
    >>> g2, gemp = ts.trdata(method='n', gvar=[3.5, 0.5, 3.5]) # Less weight on the ends
    >>> S.tr.dist2gauss()
    5.9322684525265501
    >>> np.round(gemp.dist2gauss())
    6.0
    >>> np.round(g0.dist2gauss())
    4.0
    >>> np.round(g1.dist2gauss())
    4.0
    >>> np.round(g2.dist2gauss())
    4.0
    '''

if __name__ == '__main__':
    import doctest
    doctest.testmod()