
Commit 9d24fde

Misc updates from welib
1 parent d124047 commit 9d24fde

7 files changed: +247 -131 lines changed


pydatview/fast/postpro.py

Lines changed: 6 additions & 10 deletions
@@ -5,11 +5,13 @@
 import re
 try:
     from scipy.integrate import cumulative_trapezoid
+    from numpy import trapezoid
 except:
     from scipy.integrate import cumtrapz as cumulative_trapezoid
+    from numpy import trapz as trapezoid

-import pydatview.io as weio
-from pydatview.common import PyDatViewException as WELIBException
+import welib.weio as weio
+from welib.common import WELIBException

 # --- fast libraries
 from pydatview.io.fast_input_file import FASTInputFile
@@ -908,10 +910,7 @@ def insert_extra_columns_AD(dfRad, tsAvg, vr=None, rho=None, R=None, nB=None, ch
             Ct=nB*Fx/(0.5 * rho * 2 * U0**2 * np.pi * vr)
             Ct[vr<0.01*R] = 0
             dfRad[sB+'Ctloc_[-]'] = Ct
-            try:
-                CT=2*np.trapezoid(vr_bar*Ct,vr_bar)
-            except:
-                CT=2*np.trapz(vr_bar*Ct,vr_bar)
+            CT=2*trapezoid(vr_bar*Ct,vr_bar)
             dfRad[sB+'CtAvg_[-]']= CT*np.ones(vr.shape)
         except:
             pass
@@ -1965,10 +1964,7 @@ def integrateMoment(r, F):
     """
     M = np.zeros(len(r)-1)
     for ir,_ in enumerate(r[:-1]):
-        try:
-            M[ir] = np.trapezoid(F[ir:]*(r[ir:]-r[ir]), r[ir:]-r[ir])
-        except:
-            M[ir] = np.trapz(F[ir:]*(r[ir:]-r[ir]), r[ir:]-r[ir])
+        M[ir] = trapezoid(F[ir:]*(r[ir:]-r[ir]), r[ir:]-r[ir])
     return M

 def integrateMomentTS(r, F):
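The new module-level try/except resolves a single trapezoid name once, which is what lets the per-call try/except blocks in insert_extra_columns_AD and integrateMoment collapse to one line: depending on the installed NumPy/SciPy version, only one of the two names exists. A minimal standalone sketch of the same compatibility pattern (the sample arrays are illustrative, not taken from the commit):

# Resolve integration helpers once, whatever the installed NumPy/SciPy version
# (newer NumPy exposes `trapezoid`, older only `trapz`; newer SciPy exposes
#  `cumulative_trapezoid`, older only `cumtrapz`).
import numpy as np
try:
    from numpy import trapezoid
except ImportError:
    from numpy import trapz as trapezoid
try:
    from scipy.integrate import cumulative_trapezoid
except ImportError:
    from scipy.integrate import cumtrapz as cumulative_trapezoid

r = np.linspace(0, 60, 100)                   # illustrative radial positions [m]
F = 1000.0 * (1 - r / 60.0)                   # illustrative distributed load [N/m]
M0   = trapezoid(F * r, r)                    # moment about r=0, same call on any version
Mcum = cumulative_trapezoid(F, r, initial=0)  # cumulative integral of the load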

pydatview/io/fast_input_file.py

Lines changed: 2 additions & 1 deletion
@@ -841,12 +841,13 @@ def mat_tostring(M,fmt='24.16e'):
                 s+='\n'
             s+='\n'
             return s
+
         for i in range(len(self.data)):
             d=self.data[i]
             if d['isComment']:
                 s+='{}'.format(d['value'])
             elif d['tabType']==TABTYPE_NOT_A_TAB:
-                if isinstance(d['value'], list):
+                if isinstance(d['value'], list) or isinstance(d['value'],np.ndarray):
                     sList=', '.join([str(x) for x in d['value']])
                     s+=toStringVLD(sList, d['label'], d['descr'])
                 else:
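The widened isinstance check means values stored as numpy arrays now take the same comma-joined path as plain lists instead of falling through to the scalar branch. A simplified standalone sketch of that branch (the dict layout and toStringVLD here are stand-ins, not the actual class internals):

import numpy as np

def toStringVLD(value, label, descr):
    # simplified stand-in for the value/label/description formatter
    return '{:<30s} {:<15s} - {}'.format(str(value), label, descr)

def entry_to_string(d):
    # mirrors the updated branch: lists and numpy arrays are joined the same way
    if isinstance(d['value'], list) or isinstance(d['value'], np.ndarray):
        sList = ', '.join([str(x) for x in d['value']])
        return toStringVLD(sList, d['label'], d['descr'])
    return toStringVLD(d['value'], d['label'], d['descr'])

print(entry_to_string({'value': np.array([1.0, 2.5, 4.0]), 'label': 'Vals', 'descr': 'illustrative array-valued entry'}))
print(entry_to_string({'value': 0.025,                     'label': 'DT',   'descr': 'illustrative scalar entry'}))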

pydatview/tools/colors.py

Lines changed: 1 addition & 1 deletion
@@ -136,7 +136,7 @@ def make_colormap(seq,values=None,name='CustomMap'):
         cdict['blue'].append((v, b1, b2))
         if hasAlpha:
             cdict['alpha'].append((v, a1, a2))
-    print(cdict)
+    #print(cdict)
     return mcolors.LinearSegmentedColormap(name, cdict)

 def cmap_colors(n, name='viridis'):

pydatview/tools/curve_fitting.py

Lines changed: 16 additions & 5 deletions
@@ -186,17 +186,19 @@ def fit_polynomial_discrete(x, y, exponents):
     return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula}


-def fit_powerlaw_u_alpha(x, y, z_ref=100, p0=(10,0.1)):
+def fit_powerlaw_u_alpha(x, u, z_ref=100, p0=(10,0.1)):
     """
+    x is z
+    y is u
     p[0] : u_ref
     p[1] : alpha
     """
-    pfit, _ = so.curve_fit(lambda x, *p : p[0] * (x / z_ref) ** p[1], x, y, p0=p0)
-    y_fit = pfit[0] * (x / z_ref) ** pfit[1]
+    pfit, _ = so.curve_fit(lambda x, *p : p[0] * (x / z_ref) ** p[1], x, u, p0=p0)
+    u_fit = pfit[0] * (x / z_ref) ** pfit[1]
     coeffs_dict=OrderedDict([('u_ref',pfit[0]),('alpha',pfit[1])])
     formula = '{u_ref} * (z / {z_ref}) ** {alpha}'
     fitted_fun = lambda xx: pfit[0] * (xx / z_ref) ** pfit[1]
-    return y_fit, pfit, {'coeffs':coeffs_dict,'formula':formula,'fitted_function':fitted_fun}
+    return u_fit, pfit, {'coeffs':coeffs_dict,'formula':formula,'fitted_function':fitted_fun}


 def polyfit2d(x, y, z, kx=3, ky=3, order=None):
@@ -980,7 +982,7 @@ def func(x, p):

             self.model['consts'], missing = set_common_keys(self.model['consts'], fun_kwargs )
             if len(missing)>0:
-                raise Exception('Curve fitting with function `{}` requires the following arguments {}. Missing: {}'.format(func.__name__,consts.keys(),missing))
+                raise Exception('Curve fitting with function `{}` requires the following arguments {}. Missing: {}'.format(func,self.model['consts'].keys(),missing))

# --------------------------------------------------------------------------------}
# --- Wrapper for predefined fitters
@@ -1557,6 +1559,15 @@ def pretty_num_short(x,digits=3):


 if __name__ == '__main__':
+
+
+    # --- Some usual examples
+    # ufit, pfit, fitter = model_fit('predef: powerlaw_alpha', yi, ui, p0=(1/7), u_ref=u_ref,z_ref=zref); dfit=fitter.model
+    # ufit, pfit, fitter = model_fit('predef: powerlaw_u_alpha', yi, ui, p0=(u_ref, 1/7), z_ref=zref); dfit=fitter.model
+    # ufit, pfit, dfit = fit_powerlaw_u_alpha(yi, ui, zref, p0=(u_ref, 1/7))
+
+
+
     # --- Writing example models to file for pyDatView tests
     a,b,c = 2.0, 3.0, 4.0
     u_ref,z_ref,alpha=10,12,0.12
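The commented examples added under __main__ hint at how the renamed arguments are meant to be used. A minimal usage sketch of fit_powerlaw_u_alpha on a synthetic sheared wind profile (import path inferred from the file location; the numbers are illustrative):

import numpy as np
from pydatview.tools.curve_fitting import fit_powerlaw_u_alpha

z = np.linspace(10, 200, 30)                     # heights [m]
u = 8.0 * (z / 100.0) ** 0.14                    # synthetic profile: u_ref=8, alpha=0.14
u = u + np.random.normal(0, 0.05, z.shape)       # a little measurement noise

u_fit, pfit, info = fit_powerlaw_u_alpha(z, u, z_ref=100, p0=(10, 0.1))
print(info['coeffs'])                            # OrderedDict with 'u_ref' ~8.0 and 'alpha' ~0.14
print(info['formula'])                           # '{u_ref} * (z / {z_ref}) ** {alpha}'
u_hub = info['fitted_function'](90.0)            # evaluate the fitted profile at 90 m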

pydatview/tools/signal_analysis.py

Lines changed: 68 additions & 104 deletions
@@ -426,31 +426,44 @@ def zero_crossings(y, x=None, direction=None, bouncingZero=False):
 # --------------------------------------------------------------------------------}
 # --- Correlation
 # --------------------------------------------------------------------------------{
-def autoCorrCoeff(x, nMax=None):
-    if nMax is None:
-        nMax = len(x)
-    return np.array([1]+[np.corrcoef(x[:-i], x[i:])[0,1] for i in range(1, nMax)])
-
-def correlation(x, nMax=80, dt=1, method='numpy'):
+def autoCorrCoeff(x, nMax=None, dt=1, method='corrcoef'):
     """
     Compute auto correlation of a signal
+    - nMax: number of values to return
     """
-    nvec   = np.arange(0,nMax)
+    x = x.copy() - np.mean(x)
+    var = np.var(x)
+    n = len(x)
+    if nMax is None:
+        nMax = n
+    rvec = np.arange(0,nMax)
     if method=='manual':
-        sigma2 = np.var(x)
-        R    = np.zeros(nMax)
-        R[0] =1
-        for i,nDelay in enumerate(nvec[1:]):
-            R[i+1] = np.mean( x[0:-nDelay] * x[nDelay:] ) / sigma2
-            #R[i+1] = np.corrcoef(x[:-nDelay], x[nDelay:])[0,1]
-
-    elif method=='numpy':
-        R= autoCorrCoeff(x, nMax=nMax)
+        rho    = np.zeros(nMax)
+        rho[0] =1
+        for i,nDelay in enumerate(rvec[1:]):
+            rho[i+1] = np.mean( x[0:-nDelay] * x[nDelay:] ) / var
+
+    elif method=='manual-roll':
+        rho = np.zeros(len(rvec))
+        for i,r in enumerate(rvec):
+            shifted_x = np.roll(x, int(r)) #Shift x by tau
+            rho[i] = np.mean(x * shifted_x) / var
+
+    elif method=='corrcoef':
+        rho = np.array([1]+[np.corrcoef(x[:-i], x[i:])[0,1] for i in range(1, nMax)])
+
+    elif method=='correlate':
+        rho = np.correlate(x, x, mode='full')[-n:] / (var * n)
+        rho = rho[:nMax]
     else:
-        raise NotImplementedError()
+        raise NotImplementedError(method)

-    tau = nvec*dt
-    return R, tau
+    tau = rvec*dt
+    return rho, tau
+
+def correlation(*args, **kwargs):
+    print('[WARN] welib.tools.signal_analysis.correlation will be deprecated use autoCorrCoeff')
+    return autoCorrCoeff(*args, **kwargs)
 # Auto-correlation comes in two versions: statistical and convolution. They both do the same, except for a little detail: The statistical version is normalized to be on the interval [-1,1]. Here is an example of how you do the statistical one:
 #
 #
@@ -459,48 +472,56 @@ def correlation(x, nMax=80, dt=1, method='numpy'):
 #        return result[result.size/2:]


-def xCorrCoeff(x, y, dt=None, mode='same', return_lags=False, method='numpy'):
+def xCorrCoeff(x1, x2, t=None, nMax=None, method='manual'):
     """
-    Compute and plot the cross-correlation coefficient between two signals.
-
-    Parameters:
-    - x: array-like, first signal values
-    - y: array-like, second signal values
+    Compute cross-correlation coefficient between two signals.
     """
-    from scipy.signal import correlate
-    # Compute cross-correlation
-
-    sigma1 = np.std(x)
-    sigma2 = np.std(y)
-    N = len(x)//2
-    N3 = len(x)//3
-    if method=='manual':
-        nMax = len(x)
-        R    = np.zeros(nMax)
-        R[0] =1
-        nvec   = np.arange(0,nMax)
-        for i,nDelay in enumerate(nvec[1:]):
-            R[i+1] = np.mean( x[0:-nDelay] * y[nDelay:] ) / (sigma1*sigma2)
-            #R[i+1] = np.corrcoef(x[:-nDelay], x[nDelay:])[0,1]
-        cross_corr= R
+    x1 = x1.copy()-np.mean(x1)
+    x2 = x2.copy()-np.mean(x2)
+    sigma1 = np.std(x1)
+    sigma2 = np.std(x2)
+    # Only if x1 and x2 have the same length for now
+    N1 = min(len(x1), len(x2))
+    if nMax is None:
+        nMax = len(x1)
+    if t is None:
+        t = np.array(range(N1))
+    if method=='subset-tauPos':
+        # Only if x1 and x2 have the same length
+        rho  = np.zeros(nMax)
+        rvec = np.arange(0,nMax)
+        for i,r in enumerate(rvec):
+            rho[i] = np.mean( x1[:N1-r] * x2[r:] ) / (sigma1*sigma2)
+    elif method=='manual':
+        rvec = np.array(range(-nMax+1,nMax))
+        rho = np.zeros(len(rvec))
+        # TODO two for loops for pos and neg..
+        for i,r in enumerate(rvec):
+            if r>=0:
+                t11, x11 = t [0:N1-r], x1[0:N1-r]
+                t22, x22 = t [r:]    , x2[r:]
+            else:
+                r=abs(r)
+                t22, x22 = t [0:N1-r], x2[0:N1-r]
+                t11, x11 = t [r:]    , x1[r:]
+            rho[i] = np.mean(x11*x22) / (sigma1*sigma2)
     else:
+        raise NotImplementedError(method)
         cross_corr = correlate(x, y, mode=mode)/ min(len(x), len(y)) / (sigma1*sigma2)
         if mode=='same':
             cross_corr =np.concatenate( [ cross_corr[N:], cross_corr[:N] ] )
             cross_corr[N3:2*N3]=0
-
-    if return_lags:
-        # --- Compute lags
         if mode=='full':
             lags = np.arange(-len(x) + 1, len(x)) * dt
         elif mode=='same':
             lags = (np.arange(len(x)) - N) * dt
             lags = np.concatenate( [ lags[N:], lags[:N] ] )
         else:
             raise NotImplementedError(mode)
-        return cross_corr, lags
-    else:
-        return cross_corr
+
+
+    tau = rvec * (t[1]-t[0])
+    return rho, tau


 def correlated_signal(coeff, n=1000, seed=None):
@@ -522,63 +543,6 @@ def correlated_signal(coeff, n=1000, seed=None):
     return x


-# --------------------------------------------------------------------------------}
-# ---
-# --------------------------------------------------------------------------------{
-# def crosscorr_2(ts, iy0=None, iz0=None):
-#     """ Cross correlation along y
-#     If no index is provided, computed at mid box
-#     """
-#     y = ts['y']
-#     if iy0 is None:
-#         iy0,iz0 = ts.iMid
-#     u, v, w = ts._longiline(iy0=iy0, iz0=iz0, removeMean=True)
-#     rho_uu_y=np.zeros(len(y))
-#     for iy,_ in enumerate(y):
-#         ud, vd, wd = ts._longiline(iy0=iy, iz0=iz0, removeMean=True)
-#         rho_uu_y[iy] = np.mean(u*ud)/(np.std(u)*np.std(ud))
-#     return y, rho_uu_y
-#
-# def csd_longi(ts, iy0=None, iz0=None):
-#     """ Compute cross spectral density
-#     If no index is provided, computed at mid box
-#     """
-#     import scipy.signal as sig
-#     u, v, w = ts._longiline(iy0=iy0, iz0=iz0, removeMean=True)
-#     t = ts['t']
-#     dt = t[1]-t[0]
-#     fs = 1/dt
-#     fc, chi_uu = sig.csd(u, u, fs=fs, scaling='density') #nperseg=4096, noverlap=2048, detrend='constant')
-#     fc, chi_vv = sig.csd(v, v, fs=fs, scaling='density') #nperseg=4096, noverlap=2048, detrend='constant')
-#     fc, chi_ww = sig.csd(w, w, fs=fs, scaling='density') #nperseg=4096, noverlap=2048, detrend='constant')
-#     return fc, chi_uu, chi_vv, chi_ww
-#
-# def coherence_longi(ts, iy0=None, iz0=None):
-#     """ Coherence on a longitudinal line for different delta y and delta z
-#     compared to a given point with index iy0,iz0
-#     """
-#     try:
-#         import scipy.signal as sig
-#     except:
-#         import pydatview.tools.spectral as sig
-#     if iy0 is None:
-#         iy0,iz0 = ts.iMid
-#     u, v, w = ts._longiline(iy0=iy0, iz0=iz0, removeMean=True)
-#     y = ts['y']
-#     z = ts['z']
-#     diy=1
-#     dy=y[iy]-y[iy0]
-#     # TODO
-#     iy = iy0+diy
-#     ud, vd, wd = ts._longiline(iy0=iy, iz0=iz0, removeMean=True)
-#     fc, coh_uu_y1 = sig.coherence(u,ud, fs=fs)
-
-
-
-
-# --------------------------------------------------------------------------------}
-# ---
-# --------------------------------------------------------------------------------{
 def find_time_offset(t, f, g, outputAll=False):
     """
     Find time offset between two signals (may be negative)
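A quick way to exercise the reworked correlation helpers is a synthetic AR(1) signal, whose autocorrelation is known to decay as a**lag; the different method options should then agree with each other and with the theoretical value. A short usage sketch (import path inferred from the file location, numbers illustrative):

import numpy as np
from pydatview.tools.signal_analysis import autoCorrCoeff, xCorrCoeff

np.random.seed(0)
a, n = 0.9, 5000
x = np.zeros(n)
for i in range(1, n):                       # AR(1) process, theoretical rho(lag) = a**lag
    x[i] = a * x[i-1] + np.random.normal()

rho_c, tau = autoCorrCoeff(x, nMax=50, dt=0.1, method='corrcoef')
rho_f, _   = autoCorrCoeff(x, nMax=50, dt=0.1, method='correlate')
print(rho_c[1], rho_f[1])                   # both should be close to a = 0.9

rho_xy, lags = xCorrCoeff(x, np.roll(x, 5), nMax=50)   # cross-correlation with a shifted copy
print(lags[np.argmax(rho_xy)])              # peak expected near a lag of 5 samples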
