From ed1b8a817f42d34e0701ae15c3b17520745dca2c Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 11:03:06 -0600 Subject: [PATCH 01/36] Tools: sine curve fitting and small welib updates --- pydatview/fast/postpro.py | 21 +- pydatview/tools/curve_fitting.py | 801 +++++++++++++++++-------------- pydatview/tools/damping.py | 4 +- pydatview/tools/fatigue.py | 4 +- pydatview/tools/signal.py | 224 ++++++++- pydatview/tools/spectral.py | 2 +- 6 files changed, 681 insertions(+), 375 deletions(-) diff --git a/pydatview/fast/postpro.py b/pydatview/fast/postpro.py index b596633..7b13e70 100644 --- a/pydatview/fast/postpro.py +++ b/pydatview/fast/postpro.py @@ -728,9 +728,11 @@ def addToOutlist(OutList, Signals): # --------------------------------------------------------------------------------} # --- Generic df # --------------------------------------------------------------------------------{ -def remap_df(df, ColMap, bColKeepNewOnly=False, inPlace=False): +def remap_df(df, ColMap, bColKeepNewOnly=False, inPlace=False, dataDict=None, verbose=False): """ Add/rename columns of a dataframe, potentially perform operations between columns + dataDict: dicitonary of data to be made available as "variable" in the column mapping + Example: ColumnMap={ @@ -744,17 +746,26 @@ def remap_df(df, ColMap, bColKeepNewOnly=False, inPlace=False): df = fastlib.remap_df(df, ColumnMap, inplace=True) """ + # Insert dataDict into namespace + if dataDict is not None: + for k,v in dataDict.items(): + exec('{:s} = dataDict["{:s}"]'.format(k,k)) + + if not inPlace: df=df.copy() ColMapMiss=[] ColNew=[] RenameMap=dict() + # Loop for expressions for k0,v in ColMap.items(): k=k0.strip() v=v.strip() if v.find('{')>=0: search_results = re.finditer(r'\{.*?\}', v) expr=v + if verbose: + print('Attempt to insert column {:15s} with expr {}'.format(k,v)) # For more advanced operations, we use an eval bFail=False for item in search_results: @@ -779,6 +790,8 @@ def remap_df(df, ColMap, bColKeepNewOnly=False, inPlace=False): # Applying renaming only now so that expressions may be applied in any order for k,v in RenameMap.items(): + if verbose: + print('Renaming column {:15s} > {}'.format(v,k)) k=k.strip() iCol = list(df.columns).index(v) df.columns.values[iCol]=k @@ -870,11 +883,11 @@ def find_matching_pattern(List, pattern): def extractSpanTSReg(ts, col_pattern, colname, IR=None): - """ Helper function to extract spanwise results, like B1N1Cl B1N2Cl etc. + r""" Helper function to extract spanwise results, like B1N1Cl B1N2Cl etc. Example - col_pattern: 'B1N(\d*)Cl_\[-\]' - colname : 'B1Cl_[-]' + col_pattern: r'B1N(\d*)Cl_\[-\]' + colname : r'B1Cl_[-]' """ # Extracting columns matching pattern cols, sIdx = find_matching_pattern(ts.keys(), col_pattern) diff --git a/pydatview/tools/curve_fitting.py b/pydatview/tools/curve_fitting.py index 9e7b04b..dc5e9c8 100644 --- a/pydatview/tools/curve_fitting.py +++ b/pydatview/tools/curve_fitting.py @@ -1,3 +1,78 @@ +""" +Set of tools to fit a model to data. + +The quality of a fit is usually a strong function of the initial guess. +Because of this this package contains different kind of "helpers" and "wrapper" tools. + +FUNCTIONS +--------- + +This package can help fitting using: + 1) High level functions, e.g. fit_sinusoid +OR using the `model_fit` function that handles: + 2) User defined "eval" model, e.g. the user sets a string '{a}*x + {b}*x**2' + 3) Predefined models, e.g. Gaussian, logarithmic, weibull_pdf, etc. + 4) Predefined fitters, e.g. 
SinusoidFitter, DiscretePolynomialFitter, ContinuousPolynomialFitter
+
+1) The high-level fitting functions available are:
+   - fit_sinusoid
+   - fit_polynomial
+   - fit_gaussian
+
+2) User-defined models, using the `model_fit` function:
+   - model_fit('eval: {a} + {b}*x**3 + {c}*x**5', x, y)
+   - model_fit('eval: {u_ref}*(x/{z_ref})**{alpha}', x, y, p0=(8,9,0.1), bounds=(0.001,100))
+   User-defined models require the user to provide an initial guess and potentially bounds.
+
+3) Fitting using predefined models, using the `model_fit` function:
+   - model_fit('predef: gaussian', x, y)
+   - model_fit('predef: gaussian-yoff', x, y)
+   - model_fit('predef: powerlaw_alpha', x, y, p0=(0.1), **fun_kwargs)
+   - model_fit('predef: powerlaw_u_alpha', x, y, **fun_kwargs)
+   - model_fit('predef: expdecay', x, y)
+   - model_fit('predef: weibull_pdf', x, y)
+   Predefined models have default values for bounds and guesses that can be overridden.
+
+4) Predefined fitters, wrapped with the `model_fit` function:
+   - model_fit('fitter: sinusoid', x, y)
+   - model_fit('fitter: polynomial_discrete', x, y, exponents=[0,2,4])
+   - model_fit('fitter: polynomial_continuous', x, y, order=3)
+   Predefined fitters can handle bounds and initial guesses better.
+
+INPUTS:
+--------
+All functions have the following inputs:
+   - x: array of values on the x-axis
+   - y: array of values on the y-axis (to be fitted against a model)
+Additionally, some functions have the following inputs:
+   - p0: initial values for the parameters, either a string or a dict:
+       - if a string, it is converted to a dictionary, assuming key-value pairs
+         example: 'a=0, b=1.3'
+       - if a dictionary, the keys should correspond to the parameters of the model
+         example: {'a':0, 'b':1.3}
+   - bounds: bounds for each parameter, either a string or a dictionary.
+     NOTE: pi and inf are available to set bounds
+       - if a string, it is converted to a dictionary, assuming key-value pairs
+         example: 'a=(0,3), b=(-inf,pi)'
+       - if a dictionary, the keys should correspond to the parameters of the model
+         example: {'a':(0,3), 'b':(-inf,pi)}
+
+OUTPUTS:
+--------
+All functions return the same outputs:
+   - y_fit : the fit to the y data
+   - pfit  : the list of fitted parameters
+   - fitter: a `ModelFitter` object, useful to manipulate the fit, in particular:
+       - fitter.model: dictionary with readable versions of the parameters, the formula,
+         a function to re-evaluate the fit on a different x, etc.
+       - fitter.data: data used for the fit
+       - fitter.fit_data: method to perform another fit using different data
+
+MISC
+----
+High-level fitters, predefined models or fitters can be added to this module.
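+
+EXAMPLE
+-------
+A minimal usage sketch (the functions and dictionary keys below are the ones defined in this
+module; the numerical values are only illustrative):
+
+    import numpy as np
+    x = np.linspace(0, 10, 100)
+    y = 3.0*np.sin(2.0*x + 0.5) + 1.0
+    # High-level helper: returns the fitted signal, the parameters and a ModelFitter object
+    y_fit, pfit, fitter = fit_sinusoid(x, y)
+    print(fitter.model['coeffs'])   # keys: 'A', 'omega', 'phi', 'B'
+    # Same outputs with a user-defined "eval" model; the initial guess is given as a string
+    y_fit2, pfit2, fitter2 = model_fit('eval: {a}*x + {b}', x, y, p0='a=1, b=0')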
+ +""" import numpy as np import scipy.optimize as so import scipy.stats as stats @@ -7,11 +82,115 @@ from numpy import sqrt, pi, exp, cos, sin, log, inf, arctan # for user convenience import six -__all__ = ['model_fit'] -__all__ += ['ModelFitter','ContinuousPolynomialFitter','DiscretePolynomialFitter'] -__all__ += ['fit_polynomial_continuous','fit_polynomial_discrete', 'fit_powerlaw_u_alpha'] -__all__ += ['extract_variables'] -__all__ += ['MODELS','FITTERS'] +# --------------------------------------------------------------------------------} +# --- High level fitters +# --------------------------------------------------------------------------------{ +def fit_sinusoid(x,y,physical=False): + """ Fits a sinusoid to y with formula: + if physical is False: y_fit=A*sin(omega*x+phi)+B + if physical is True: y_fit=A*sin(2*pi(f+phi/360))+B """ + y_fit, pfit, fitter = model_fit('fitter: sinusoid', x, y, physical=physical) + return y_fit, pfit, fitter + +def fit_polynomial(x, y, order=None, exponents=None): + """ Fits a polynomial to y, either: + - full up to a given order: y_fit= {a_i} x^i , i=0..order + - or using a discrete set of exponents: y_fit= {a_i} x^e[i], i=0,..len(exponents) + OPTIONAL INPUTS: + - order: integer + Maximum order of polynomial, e.g. 2: for a x**0 + b x**1 + c x**2 + - exponents: array-like + Exponents to be used. e.g. [0,2,5] for a x**0 + b x**2 + c x**5 + """ + if order is not None: + y_fit, pfit, fitter = model_fit('fitter: polynomial_continuous', x, y, order=order) + else: + y_fit, pfit, fitter = model_fit('fitter: polynomial_discrete', x, y, exponents=exponents) + return y_fit, pfit, fitter + +def fit_gaussian(x, y, offset=False): + """ Fits a gaussin to y, with the following formula: + offset is True : '1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2)' + offset is False: '1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2) + {y0}' + """ + if offset: + return model_fit('predef: gaussian-yoff', x, y) + else: + return model_fit('predef: gaussian', x, y) + +# --------------------------------------------------------------------------------} +# --- Simple mid level fitter +# --------------------------------------------------------------------------------{ +def fit_polynomial_continuous(x, y, order): + """Fit a polynomial with a continuous set of exponents up to a given order + + Parameters + ---------- + x,y: see `model_fit` + order: integer + Maximum order of polynomial, e.g. 2: for a x**0 + b x**1 + c x**2 + + Returns + ------- + see `model_fit` + """ + pfit = np.polyfit(x,y,order) + y_fit = np.polyval(pfit,x) + + # coeffs_dict, e.g. {'a':xxx, 'b':xxx}, formula = 'a*x + b' + variables = string.ascii_lowercase[:order+1] + coeffs_dict = OrderedDict([(var,coeff) for i,(coeff,var) in enumerate(zip(pfit,variables))]) + formula = ' + '.join(['{}*x**{}'.format(var,order-i) for i,var in enumerate(variables)]) + formula = _clean_formula(formula) + + return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula,'fitted_function':lambda xx : np.polyval(pfit,xx)} + +def fit_polynomial_discrete(x, y, exponents): + """Fit a polynomial with a discrete set of exponents + + Parameters + ---------- + x,y: see `model_fit` + exponents: array-like + Exponents to be used. e.g. 
[0,2,5] for a x**0 + b x**2 + c x**5 + + Returns + ------- + see `model_fit` + """ + #exponents=-np.sort(-np.asarray(exponents)) + X_poly=np.array([]) + for i,e in enumerate(exponents): + if i==0: + X_poly = np.array([x**e]) + else: + X_poly = np.vstack((X_poly,x**e)) + try: + pfit = np.linalg.lstsq(X_poly.T, y, rcond=None)[0] + except: + pfit = np.linalg.lstsq(X_poly.T, y) + y_fit= np.dot(pfit, X_poly) + + variables = string.ascii_lowercase[:len(exponents)] + coeffs_dict = OrderedDict([(var,coeff) for i,(coeff,var) in enumerate(zip(pfit,variables))]) + formula = ' + '.join(['{}*x**{}'.format(var,e) for var,e in zip(variables,exponents)]) + formula = _clean_formula(formula) + + return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula} + + +def fit_powerlaw_u_alpha(x, y, z_ref=100, p0=(10,0.1)): + """ + p[0] : u_ref + p[1] : alpha + """ + pfit, _ = so.curve_fit(lambda x, *p : p[0] * (x / z_ref) ** p[1], x, y, p0=p0) + y_fit = pfit[0] * (x / z_ref) ** pfit[1] + coeffs_dict=OrderedDict([('u_ref',pfit[0]),('alpha',pfit[1])]) + formula = '{u_ref} * (z / {z_ref}) ** {alpha}' + fitted_fun = lambda xx: pfit[0] * (xx / z_ref) ** pfit[1] + return y_fit, pfit, {'coeffs':coeffs_dict,'formula':formula,'fitted_function':fitted_fun} + # --------------------------------------------------------------------------------} # --- Predifined functions NOTE: they need to be registered in variable `MODELS` @@ -48,14 +227,26 @@ def weibull_pdf(x, p, z_ref=100): """ p = (A, k) formula: {k}*x**({k}-1) / {A}**{k} * np.exp(-x/{A})**{k} """, return p[1] * x ** (p[1] - 1) / p[0] ** p[1] * np.exp(-(x / p[0]) ** p[1]) +def sinusoid(x, p): + """ p = (A,omega,phi,B) """ + return p[0]*np.sin(p[1]*x+p[2]) + p[3] +def sinusoid_f(x, p): + """ p = (A,f,phi_deg,B) """ + return p[0]*np.sin(2*pi*(p[1]*x+p[2]/360)) + p[3] + def gentorque(x, p): """ + INPUTS: x: generator or rotor speed p= (RtGnSp, RtTq , Rgn2K , SlPc , SpdGenOn) RtGnSp Rated generator speed for simple variable-speed generator control (HSS side) (rpm) RtTq Rated generator torque/constant generator torque in Region 3 for simple variable-speed generator control (HSS side) (N-m) Rgn2K Generator torque constant in Region 2 for simple variable-speed generator control (HSS side) (N-m/rpm^2) SlPc Rated generator slip percentage in Region 2 1/2 for simple variable-speed generator control (%) + + OUTPUTS: + GenTrq: Generator torque [Nm] + """ # Init @@ -130,11 +321,11 @@ def gentorque(x, p): 'coeffs' :'A=1, k=1', # Order Important 'consts' :None, 'bounds' :'A=(0.1,inf), k=(0,5)'}, -# {'label':'Generator Torque', 'handle': gentorque, 'id':'predef: gentorque', -# 'formula': '{RtGnSp} , {RtTq} , {Rgn2K} , {SlPc} , {SpdGenOn}', -# 'coeffs' : 'RtGnSp=100 , RtTq=1000 , Rgn2K=0.01 ,SlPc=5 , SpdGenOn=0', # Order Important -# 'consts' :None, -# 'bounds' :'RtGnSp=(0.1,inf) , RtTq=(1,inf), Rgn2K=(0.0,0.1) ,SlPc=(0,20) , SpdGenOn=(0,inf)'} +{'label':'Generator Torque', 'handle': gentorque, 'id':'predef: gentorque', +'formula': '{RtGnSp} , {RtTq} , {Rgn2K} , {SlPc} , {SpdGenOn}', +'coeffs' : 'RtGnSp=100 , RtTq=1000 , Rgn2K=0.01 ,SlPc=5 , SpdGenOn=0', # Order Important +'consts' :None, +'bounds' :'RtGnSp=(0.1,inf) , RtTq=(1,inf), Rgn2K=(0.0,0.1) ,SlPc=(0,20) , SpdGenOn=(0,inf)'} ] # --------------------------------------------------------------------------------} @@ -146,7 +337,7 @@ def model_fit(func, x, y, p0=None, bounds=None, **fun_kwargs): ---------- func: string or function handle - function handle - - string starting with "fitter: ": (see variable _FITTER) + - string starting with 
"fitter: ": (see variable FITTERS) - "fitter: polynomial_continuous 5' : polyfit order 5 - "fitter: polynomial_discrete 0 2 3 ': fit polynomial of exponents 0 2 3 - string providing an expression to evaluate, e.g.: @@ -156,24 +347,42 @@ def model_fit(func, x, y, p0=None, bounds=None, **fun_kwargs): - "predef: powerlaw_all" : - "predef: gaussian " : + x: array of x values + y: array of y values + p0: initial values for parameters, either a string or a dict: + - if a string: the string is converted to a dictionary, assuming key value pairs + example: 'a=0, b=1.3' + - if a dictionary, then keys should corresponds to the parameters of the model + example: {'a':0, 'b':1.3} + bounds: bounds for each parameters, either a string or a dictionary. + NOTE: pi and inf are available to set bounds + - if a string, the string is converted to a dictionary assuming key value pairs + example: 'a=(0,3), b=(-inf,pi)' + - if a dictionary, the keys should corresponds to the parameters of the model + example: {'a':(0,3), 'b':(-inf,pi)} + Returns ------- y_fit: array with same shape as `x` fitted data. + pfit : fitted parameters fitter: ModelFitter object """ if isinstance(func,six.string_types) and func.find('fitter:')==0: + # --- This is a high level fitter, we call the class + # The info about the class are storred in the global variable FITTERS + # See e.g. SinusoidFitter, DiscretePolynomialFitter predef_fitters=[m['id'] for m in FITTERS] if func not in predef_fitters: raise Exception('Function `{}` not defined in curve_fitting module\n Available fitters: {}'.format(func,predef_fitters)) - i = predef_fitters.index(func) + i = predef_fitters.index(func) FitterDict = FITTERS[i] - consts=FITTERS[i]['consts'] + consts = FITTERS[i]['consts'] args, missing = set_common_keys(consts, fun_kwargs) if len(missing)>0: raise Exception('Curve fitting with `{}` requires the following arguments {}. Missing: {}'.format(func,consts.keys(),missing)) - + # Calling the class fitter = FitterDict['handle'](x=x, y=y, p0=p0, bounds=bounds, **fun_kwargs) else: fitter = ModelFitter(func, x, y, p0=p0, bounds=bounds, **fun_kwargs) @@ -182,6 +391,9 @@ def model_fit(func, x, y, p0=None, bounds=None, **fun_kwargs): return fitter.data['y_fit'], pfit , fitter +# --------------------------------------------------------------------------------} +# --- Main Class +# --------------------------------------------------------------------------------{ class ModelFitter(): def __init__(self,func=None, x=None, y=None, p0=None, bounds=None, **fun_kwargs): @@ -236,8 +448,6 @@ def set_model(self,func, **fun_kwargs): # Check that the formula evaluates x=np.array([1,2,5])*np.sqrt(2) # some random evaluation vector.. p=[np.sqrt(2)/4]*nParams # some random initial conditions - #print('p',p) - #print('f',formula_eval) try: y=eval(formula_eval) y=np.asarray(y) @@ -276,7 +486,7 @@ def func(x, p): if len(missing)>0: raise Exception('Curve fitting with function `{}` requires the following arguments {}. 
Missing: {}'.format(func.__name__,consts.keys(),missing)) - def setup_bounds(self,bounds,nParams): + def setup_bounds(self, bounds, nParams): if bounds is not None: self.model['bounds']=bounds # store in model bounds=self.model['bounds'] # usemodel bounds as default @@ -295,6 +505,7 @@ def setup_bounds(self,bounds,nParams): b1.append(bounds[k][0]) b2.append(bounds[k][1]) else: + # TODO merge default bounds raise Exception('Bounds dictionary is missing the key: `{}`'.format(k)) bounds=(b1,b2) else: @@ -312,11 +523,35 @@ def setup_bounds(self,bounds,nParams): self.model['bounds']=bounds # store in model - def setup_guess(self,p0,bounds, nParams): - """ Setup initial values p0: - - if p0 is a string (e.g. " a=1, b=3"), it's converted to a dict - - if p0 is a dict, the ordered keys of model['coeffs'] are used to sort p0 + def setup_guess(self, p0, bounds, nParams): + """ + Setup initial parameter values for the fit, based on what the user provided, and potentially the bounds + + INPUTS: + - p0: initial parameter values for the fit + - if a string (e.g. " a=1, b=3"), it's converted to a dict + - if a dict, the ordered keys of model['coeffs'] are used to sort p0 + - bounds: tuple of lower and upper bounds for each parameters. + Parameters are ordered as function of models['coeffs'] + bounds[0]: lower bounds or all parameters + bounds[1]: upper bounds or all parameters + + We can assume that the bounds are set """ + def middleOfBounds(i): + """ return middle of bounds for parameter `i`""" + bLow = bounds[0][i] + bHigh = bounds[0][2] + if (bLow,bHigh)==(-np.inf,np.inf): + p_i=0 + elif bLow==-np.inf: + p_i = -abs(bHigh)*2 + elif bHigh== np.inf: + p_i = abs(bLow)*2 + else: + p_i = (bLow+bHigh)/2 + return p_i + if isinstance(p0 ,six.string_types): p0=extract_key_num(p0) if len(p0)==0: @@ -333,14 +568,7 @@ def setup_guess(self,p0,bounds, nParams): # use middle of bounds p0 = [0]*nParams for i,(b1,b2) in enumerate(zip(bounds[0],bounds[1])): - if (b1,b2)==(-np.inf,np.inf): - p0[i]=0 - elif b1==-np.inf: - p0[i] = -abs(b2)*2 - elif b2== np.inf: - p0[i] = abs(b1)*2 - else: - p0[i] = (b1+b2)/2 + p0[i] = middleOfBounds(i) p0 = (p0) elif isinstance(p0,dict): # User supplied a dictionary, we use the ordered keys of coeffs to sort p0 @@ -355,10 +583,20 @@ def setup_guess(self,p0,bounds, nParams): else: raise NotImplementedError('Guess dictionary with no known model coeffs.') - # TODO check that p0 is within bounds if not hasattr(p0,'__len__'): p0=(p0,) + + # --- Last check that p0 is within bounds + if bounds is not None: + for p,k,lb,ub in zip(p0, self.model['coeffs'].keys(), bounds[0], bounds[1]): + if pub: + raise Exception('Parameter `{}` has the guess value {}, which is larger than the upper bound ({})'.format(k,p,ub)) + # TODO potentially set it as middle of bounds + + # --- Finally, store the initial guesses in the model self.model['coeffs_init'] = p0 def fit(self, func, x, y, p0=None, bounds=None, **fun_kwargs): @@ -384,7 +622,7 @@ def clean_data(self,x,y): def fit_data(self, x, y, p0=None, bounds=None): """ fit data, assuming a model is already setup""" if self.model['model_function'] is None: - raise Exceptioin('Call set_function first') + raise Exception('Call set_function first') # Cleaning data, and store it in object x,y=self.clean_data(x,y) @@ -458,6 +696,39 @@ def formula_num(self, fmt=None): formula_num = formula_num.replace('{'+k+'}',fmt_fun(v)) return formula_num + + + def plot(self, x=None, fig=None, ax=None): + if x is None: + x=self.data['x'] + + sFormula = 
_clean_formula(self.model['formula'],latex=True) + + import matplotlib.pyplot as plt + import matplotlib.patches as mpatches + + if fig is None: + fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) + fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) + + ax.plot(self.data['x'], self.data['y'], '.', label='Data') + ax.plot(x, self.model['fitted_function'](x), '-', label='Model ' + sFormula) + + # Add extra info to the legend + handles, labels = ax.get_legend_handles_labels() # get existing handles and labels + empty_patch = mpatches.Patch(color='none', label='Extra label') # create a patch with no color + for k,v in self.model['coeffs'].items(): + handles.append(empty_patch) # add new patches and labels to list + labels.append(r'${:s}$ = {}'.format(pretty_param(k),pretty_num_short(v))) + handles.append(empty_patch) # add new patches and labels to list + labels.append('$R^2$ = {}'.format(pretty_num_short(self.model['R2']))) + ax.legend(handles, labels) + + + #ax.set_xlabel('') + #ax.set_ylabel('') + return fig,ax + def __repr__(self): s='<{} object> with fields:\n'.format(type(self).__name__) s+=' - data, dictionary with keys: \n' @@ -495,9 +766,9 @@ def fit_data(self, x, y, p0=None, bounds=None): nParams=self.order+1 # Bounds - self.setup_bounds(bounds,nParams) # TODO + self.setup_bounds(bounds, nParams) # TODO # Initial conditions - self.setup_guess(p0,bounds,nParams) # TODO + self.setup_guess(p0, bounds, nParams) # TODO # Fitting pfit = np.polyfit(x,y,self.order) @@ -535,9 +806,9 @@ def fit_data(self, x, y, p0=None, bounds=None): nParams=len(self.exponents) # Bounds - self.setup_bounds(bounds,nParams) # TODO + self.setup_bounds(bounds, nParams) # TODO # Initial conditions - self.setup_guess(p0,bounds,nParams) # TODO + self.setup_guess(p0, bounds, nParams) # TODO X_poly=np.array([]) for i,e in enumerate(self.exponents): @@ -562,6 +833,57 @@ def fitted_function(xx): return y self.model['fitted_function']=fitted_function + +class SinusoidFitter(ModelFitter): + def __init__(self, physical=False, x=None, y=None, p0=None, bounds=None): + ModelFitter.__init__(self, x=None, y=None, p0=p0, bounds=bounds) + #self.setOrder(int(order)) + self.physical=physical + if physical: + self.model['coeffs'] = OrderedDict([('A',1),('f',1),('phi',0),('B',0)]) + self.model['formula'] = '{A} * sin(2*pi*({f}*x + {phi}/360)) + {B}' + else: + self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('phi',0),('B',0)]) + self.model['formula'] = '{A} * sin({omega}*x + {phi}) + {B}' + + if x is not None and y is not None: + self.fit_data(x,y,p0,bounds) + + def fit_data(self, x, y, p0=None, bounds=None): + # Cleaning data + x,y=self.clean_data(x,y) + + nParams=4 + + # TODO use signal + dt = x[1]-x[0] + ff = np.fft.fftfreq(len(x), (dt)) # assume uniform spacing + Fyy = abs(np.fft.fft(y)) + guess_freq = abs(ff[np.argmax(Fyy[1:])+1]) # excluding the zero frequency "peak", which is related to offset + + guess_amp = np.std(y) * 2.**0.5 + guess_offset = np.mean(y) + if self.physical: + guess = np.array([guess_amp, guess_freq, 0., guess_offset]) + minimize_me = lambda x, *p : sinusoid_f(x, p) + else: + guess = np.array([guess_amp, 2.*np.pi*guess_freq, 0., guess_offset]) + minimize_me = lambda x, *p : sinusoid(x, p) + self.model['coeffs_init'] = guess + + pfit, pcov = so.curve_fit(minimize_me, x, y, p0=guess) + + # --- Reporting information about the fit (after the fit) + # And Return a fitted function + if self.physical: + y_fit = sinusoid_f(x, pfit) + 
self.model['fitted_function']=lambda xx : sinusoid_f(xx, pfit) + else: + y_fit = sinusoid(x, pfit) + self.model['fitted_function']=lambda xx : sinusoid(xx, pfit) + self.store_fit_info(y_fit, pfit) + + class GeneratorTorqueFitter(ModelFitter): def __init__(self,x=None, y=None, p0=None, bounds=None): ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) @@ -665,6 +987,8 @@ def minimize_me(p): 'consts':{'order':3}, 'formula': '{a_i} x^i'}, {'label':'Polynomial (partial)','id':'fitter: polynomial_discrete' , 'handle': DiscretePolynomialFitter , 'consts':{'exponents':[0,2,3]},'formula': '{a_i} x^j'}, +{'label':'Sinusoid','id':'fitter: sinusoid' , 'handle': SinusoidFitter , +'consts':{'physical':True},'formula': '{A}*sin({omega or 2 pi f}*x+{phi or phi_deg}) + {B} '}, # {'label':'Generator Torque','id':'fitter: gentorque' , 'handle': GeneratorTorqueFitter , # 'consts':{},'formula': ''} ] @@ -698,7 +1022,7 @@ def extract_key_tuples(text): if text is None: return {} regex = re.compile(r'(?P[\w\-]+)=\((?P[0-9+epinf.-]*?),(?P[0-9+epinf.-]*?)\)($|,)') - return {match.group("key"): (np.float(match.group("value1")),np.float(match.group("value2"))) for match in regex.finditer(text.replace(' ',''))} + return {match.group("key"): (float(match.group("value1")),float(match.group("value2"))) for match in regex.finditer(text.replace(' ',''))} def extract_key_num(text): """ @@ -707,7 +1031,7 @@ def extract_key_num(text): if text is None: return {} regex = re.compile(r'(?P[\w\-]+)=(?P[0-9+epinf.-]*?)($|,)') - return OrderedDict([(match.group("key"), np.float(match.group("value"))) for match in regex.finditer(text.replace(' ',''))]) + return OrderedDict([(match.group("key"), float(match.group("value"))) for match in regex.finditer(text.replace(' ',''))]) def extract_key_miscnum(text): """ @@ -733,14 +1057,20 @@ def isint(s): if v.find('(')>=0: v=v.replace('(','').replace(')','') v=v.split(',') - vect=tuple([np.float(val) for val in v if len(val.strip())>0]) + vect=tuple([float(val) for val in v if len(val.strip())>0]) elif v.find('[')>=0: v=v.replace('[','').replace(']','') v=v.split(',') - vect=[int(val) if isint(val) else np.float(val) for val in v if len(val.strip())>0] # NOTE returning lists + vect=[int(val) if isint(val) else float(val) for val in v if len(val.strip())>0] # NOTE returning lists + elif v.find('True')>=0: + v=v.replace(',','').strip() + vect=True + elif v.find('False')>=0: + v=v.replace(',','').strip() + vect=False else: v=v.replace(',','').strip() - vect=int(v) if isint(v) else np.float(v) + vect=int(v) if isint(v) else float(v) d[k]=vect return d @@ -754,8 +1084,30 @@ def set_common_keys(dict_target, dict_source): keys_missing.append(k) return dict_target, keys_missing -def _clean_formula(s): - return s.replace('+-','-').replace('**1','').replace('*x**0','') +def _clean_formula(s, latex=False): + s = s.replace('+-','-').replace('**1','').replace('*x**0','') + s = s.replace('np.','') + if latex: + #s = s.replace('{','$').replace('}','$') + s = s.replace('phi',r'\phi') + s = s.replace('alpha',r'\alpha') + s = s.replace('beta' ,r'\alpha') + s = s.replace('mu' ,r'\mu' ) + s = s.replace('pi' ,r'\pi' ) + s = s.replace('sigma',r'\sigma') + s = s.replace('omega',r'\omega') + s = s.replace('_ref',r'_{ref}') # make this general + s = s.replace(r'(',r'{(') + s = s.replace(r')',r')}') + s = s.replace(r'**',r'^') + s = s.replace(r'*', '') + s = s.replace('sin',r'\sin') + s = s.replace('exp',r'\exp') + s = s.replace('sqrt',r'\sqrt') + s = r'$'+s+r'$' + else: + s = 
s.replace('{','').replace('}','') + return s def rsquare(y, f): """ Compute coefficient of determination of data fit model and RMSE @@ -778,331 +1130,52 @@ def rsquare(y, f): R2 = max(0,1-np.sum((y-f)**2)/np.sum((y-np.mean(y))** 2)) return R2 -# --------------------------------------------------------------------------------} -# --- Low level fitter -# --------------------------------------------------------------------------------{ -def fit_polynomial_continuous(x, y, order): - """Fit a polynomial with a continuous set of exponents up to a given order - - Parameters - ---------- - x,y: see `model_fit` - order: integer - Maximum order of polynomial, e.g. 2: for a x**0 + b x**1 + c x**2 - - Returns - ------- - see `model_fit` - """ - pfit = np.polyfit(x,y,order) - y_fit = np.polyval(pfit,x) - - # coeffs_dict, e.g. {'a':xxx, 'b':xxx}, formula = 'a*x + b' - variables = string.ascii_lowercase[:order+1] - coeffs_dict = OrderedDict([(var,coeff) for i,(coeff,var) in enumerate(zip(pfit,variables))]) - formula = ' + '.join(['{}*x**{}'.format(var,order-i) for i,var in enumerate(variables)]) - formula = _clean_formula(formula) - +def pretty_param(s): + if s in ['alpha','beta','delta','gamma','epsilon','lambda','mu','nu','pi','rho','sigma','phi','psi','omega']: + s = r'\{}'.format(s) + s = s.replace('_ref',r'_{ref}') # make this general.. + return s - return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula,'fitted_function':lambda xx : np.polyval(pfit,xx)} - -def fit_polynomial_discrete(x, y, exponents): - """Fit a polynomial with a discrete set of exponents - - Parameters - ---------- - x,y: see `model_fit` - exponents: array-like - Exponents to be used. e.g. [0,2,5] for a x**0 + b x**2 + c x**5 +def pretty_num(x): + if abs(x)<1000 and abs(x)>1e-4: + return "{:9.4f}".format(x) + else: + return '{:.3e}'.format(x) - Returns - ------- - see `model_fit` - """ - #exponents=-np.sort(-np.asarray(exponents)) - X_poly=np.array([]) - for i,e in enumerate(exponents): - if i==0: - X_poly = np.array([x**e]) +def pretty_num_short(x,digits=3): + if digits==4: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.4f}".format(x) else: - X_poly = np.vstack((X_poly,x**e)) - try: - pfit = np.linalg.lstsq(X_poly.T, y, rcond=None)[0] - except: - pfit = np.linalg.lstsq(X_poly.T, y) - y_fit= np.dot(pfit, X_poly) - - variables = string.ascii_lowercase[:len(exponents)] - coeffs_dict = OrderedDict([(var,coeff) for i,(coeff,var) in enumerate(zip(pfit,variables))]) - formula = ' + '.join(['{}*x**{}'.format(var,e) for var,e in zip(variables,exponents)]) - formula = _clean_formula(formula) - - return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula} - - -def fit_powerlaw_u_alpha(x, y, z_ref=100, p0=(10,0.1)): - """ - p[0] : u_ref - p[1] : alpha - """ - pfit, _ = so.curve_fit(lambda x, *p : p[0] * (x / z_ref) ** p[1], x, y, p0=p0) - y_fit = pfit[0] * (x / z_ref) ** pfit[1] - coeffs_dict=OrderedDict([('u_ref',pfit[0]),('alpha',pfit[1])]) - formula = '{u_ref} * (z / {z_ref}) ** {alpha}' - fitted_fun = lambda xx: pfit[0] * (xx / z_ref) ** pfit[1] - return y_fit, pfit, {'coeffs':coeffs_dict,'formula':formula,'fitted_function':fitted_fun} - -# --------------------------------------------------------------------------------} -# --- Unittests -# --------------------------------------------------------------------------------{ -import unittest - -class TestFitting(unittest.TestCase): - - def test_gaussian(self): - mu,sigma=0.5,1.2 - x=np.linspace(0,1,10) - y=gaussian(x,(mu,sigma)) - y_fit, pfit, fitter = model_fit('predef: gaussian', x, y) - 
np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(mu ,fitter.model['coeffs']['mu']) - np.testing.assert_almost_equal(sigma,fitter.model['coeffs']['sigma']) - - def test_gaussian_w_offset(self): - mu,sigma,y0=0.5,1.2,10 - x=np.linspace(0,1,10) - y=gaussian_w_offset(x,(mu,sigma,y0)) - y_fit, pfit, fitter = model_fit('predef: gaussian-yoff', x, y) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(mu ,fitter.model['coeffs']['mu']) - np.testing.assert_almost_equal(sigma,fitter.model['coeffs']['sigma']) - np.testing.assert_almost_equal(y0 ,fitter.model['coeffs']['y0']) - - def test_powerlaw_alpha(self): - u_ref,z_ref,alpha=20,12,0.12 - x = np.linspace(0,1,10) - y=powerlaw_all(x,(alpha,u_ref,z_ref)) - - fun_kwargs = {'u_ref':u_ref,'z_ref':z_ref} - y_fit, pfit, fitter = model_fit('predef: powerlaw_alpha', x, y, p0=(0.1), **fun_kwargs) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(alpha ,fitter.model['coeffs']['alpha']) - - def test_powerlaw_u_alpha(self): - u_ref,z_ref,alpha=10,12,0.12 - x = np.linspace(0,1,10) - y=powerlaw_all(x,(alpha,u_ref,z_ref,alpha)) - - fun_kwargs = {'z_ref':z_ref} - y_fit, pfit, fitter = model_fit('predef: powerlaw_u_alpha', x, y, **fun_kwargs) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(alpha ,fitter.model['coeffs']['alpha']) - np.testing.assert_almost_equal(u_ref ,fitter.model['coeffs']['u_ref']) - -# def test_powerlaw_all(self): -# u_ref,z_ref,alpha=10,12,0.12 -# x = np.linspace(0,1,10) -# y=powerlaw_all(x,(alpha,u_ref,z_ref,alpha)) -# -# y_fit, pfit, fitter = model_fit('predef: powerlaw_all', x, y) -# np.testing.assert_array_almost_equal(y,y_fit) -# np.testing.assert_almost_equal(alpha ,fitter.model['coeffs']['alpha']) -# # NOTE: cannot test for u_ref or z - - def test_expdecay(self): - A,k,B=0.5,1.2,10 - x=np.linspace(0,1,10) - y=expdecay(x,(A,k,B)) - y_fit, pfit, fitter = model_fit('predef: expdecay', x, y) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(A,fitter.model['coeffs']['A']) - np.testing.assert_almost_equal(k,fitter.model['coeffs']['k']) - np.testing.assert_almost_equal(B,fitter.model['coeffs']['B']) - - def test_weibull(self): - A, k = 10, 2.3, - x=np.linspace(0,1,10) - y=weibull_pdf(x,(A,k)) - y_fit, pfit, fitter = model_fit('predef: weibull_pdf', x, y) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(A,fitter.model['coeffs']['A'],5) - np.testing.assert_almost_equal(k,fitter.model['coeffs']['k']) - - def test_gentorque(self): - pass # TODO -# GBRatio= 27.5647 #; % Gearbox ratio (-) -# SpdGenOn = 14*GBRatio# -# RtGnSp = 1207.61 # % Rated generator speed for simple variable-speed generator control (HSS side) (rpm) -# RtTq = 1790.49 # % Rated generator torque/constant generator torque in Region 3 for simple variable-speed generator control (HSS side) (N-m) -# Rgn2K = 0.0004128 # % Generator torque constant in Region 2 for simple variable-speed generator control (HSS side) (N-m/rpm^2) -# SlPc = 6 # % Rated generator slip percentage in Region 2 1/2 for simple variable-speed generator control (%) -# # x=np.linspace(300,1500,100) -# x=np.linspace(300,1000,100) -# y=gentorque(x, (RtGnSp, RtTq , Rgn2K , SlPc , SpdGenOn)) -# -# bounds='RtGnSp=(1200,1300) , RtTq=(1500,1800), Rgn2K=(0.0,0.01) ,SlPc=(0,20) , SpdGenOn=(10,500)' -# p0 = [1250, 1700,0.001, 10, 50] -# y_fit, pfit, fitter = model_fit('fitter: gentorque', x, y) -# -# y_fit, pfit, fitter = model_fit('predef: 
gentorque', x, y, bounds=bounds, p0=p0) -# # np.testing.assert_array_almost_equal(y,y_fit) -# print(fitter) -# import matplotlib.pyplot as plt -# -# fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) -# fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) -# ax.plot(x, y ,'o', label='') -# ax.plot(x, y_fit ,'-', label='') -# ax.plot(x, fitter.model['fitted_function'](x) ,'.', label='') -# ax.set_xlabel('') -# ax.set_ylabel('') -# ax.legend() -# ax.tick_params(direction='in') -# plt.show() - - def test_polycont(self): - k = 2.0 - x = np.linspace(0,1,10) - y = k * x**3 - y_fit, pfit, fitter = model_fit('fitter: polynomial_continuous', x, y, order=3) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(k ,fitter.model['coeffs']['a']) - np.testing.assert_almost_equal(0 ,fitter.model['coeffs']['b']) - np.testing.assert_almost_equal(0 ,fitter.model['coeffs']['c']) - np.testing.assert_almost_equal(0 ,fitter.model['coeffs']['d']) - - def test_polydisc(self): - exponents=[0,3,5] - a,b,c = 2.0, 3.0, 4.0 - x = np.linspace(0,1,10) - y = a + b*x**3 + c*x**5 - y_fit, pfit, fitter = model_fit('fitter: polynomial_discrete', x, y, exponents=exponents) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(a ,fitter.model['coeffs']['a']) - np.testing.assert_almost_equal(b ,fitter.model['coeffs']['b']) - np.testing.assert_almost_equal(c ,fitter.model['coeffs']['c']) - - def test_evalpoly(self): - exponents=[0,3,5] - a,b,c = 2.0, 3.0, 4.0 - x = np.linspace(0,1,10) - y = a + b*x**3 + c*x**5 - y_fit, pfit, fitter = model_fit('eval: {a} + {b}*x**3 + {c}*x**5', x, y) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(a ,fitter.model['coeffs']['a']) - np.testing.assert_almost_equal(b ,fitter.model['coeffs']['b']) - np.testing.assert_almost_equal(c ,fitter.model['coeffs']['c']) - - def test_evalpowerlaw(self): - u_ref,z_ref,alpha=10,12,0.12 - x = np.linspace(0,1,10) - y=powerlaw_all(x,(alpha,u_ref,z_ref)) - y_fit, pfit, fitter = model_fit('eval: {u_ref}*(x/{z_ref})**{alpha}', x, y, p0=(8,9,0.1), bounds=(0.001,100)) - np.testing.assert_array_almost_equal(y,y_fit) - - def test_lowlevelpoly(self): - x=np.linspace(0,1,10) - y=x**2 - exponents=[0,1,2] - y_fit, pfit, model = fit_polynomial_discrete(x, y, exponents) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(1 , model['coeffs']['c']) - np.testing.assert_almost_equal(0 , model['coeffs']['a']) - - y_fit, pfit, model = fit_polynomial_continuous(x, y, 3) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(1 , model['coeffs']['b']) - np.testing.assert_almost_equal(0 , model['coeffs']['a']) - - def test_lowlevelpowerlaw(self): - u_ref,z_ref,alpha=10,12,0.12 - x = np.linspace(0,1,10) - y=powerlaw_all(x,(alpha,u_ref,z_ref)) - - y_fit, pfit, model = fit_powerlaw_u_alpha(x, y, z_ref=z_ref, p0=(9,0.1)) - np.testing.assert_array_almost_equal(y,y_fit) - np.testing.assert_almost_equal(alpha , model['coeffs']['alpha']) - np.testing.assert_almost_equal(u_ref , model['coeffs']['u_ref']) - -# def test_debug(self): -# # --- Try Gaussian -# x=np.linspace(0,1,10) -# y=gaussian(x,(0.5,1.2)) -# y_fit, pfit, fitter = model_fit('predef: gaussian', x, y) #, p0=(0,1)) -# # fitter = ModelFitter('eval: {a}*(1.0/{b}+2/0)**{c}', x, y, p0=(8,9,0.1)) -# # fitter = ModelFitter('eval: {a}/x', x, y, p0=(8,9,0.1)) -# -# # --- Plot -# y_fit=fitter.data['y_fit'] -# print(fitter) -# -# import 
matplotlib.pyplot as plt -# fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) -# fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) -# ax.plot(x, y ,'o', label='') -# ax.plot(x, y_fit ,'-', label='') -# ax.plot(x, fitter.model['fitted_function'](x) ,'.', label='') -# ax.set_xlabel('') -# ax.set_ylabel('') -# ax.legend() -# ax.tick_params(direction='in') -# plt.show() - - def test_extract_var(self): - var, _ = extract_variables('{a}*x + {b}') - self.assertEqual(var,['a','b']) - - var, _ = extract_variables('{BB}*x + {a}*{BB}') - self.assertEqual(var,['BB','a']) - - var, _ = extract_variables('{a}*x + {{b}}') #< TODO Won't work - #self.assertEqual(var,['a','b']) - - def test_key_tuples(self): - self.assertEqual(extract_key_tuples('a=(1,2)'),{'a':(1,2)}) - - self.assertEqual(extract_key_tuples('a=(1, 2),b =(inf,0),c= ( -inf , 0.3e+10)'),{'a':(1,2),'b':(inf,0),'c':(-inf,0.3e+10)}) - - def test_key_num(self): - self.assertEqual(extract_key_num('a=2'),OrderedDict({'a':2})) - self.assertEqual(extract_key_num('all=0.1,b =inf, c= -0.3e+10'),OrderedDict({'all':0.1,'b':inf,'c':-0.3e+10})) - - def test_key_misc(self): - self.assertEqual(extract_key_miscnum('a=2'),{'a':2}) - - #np.testing.assert_almost_equal(d['a'],(2,3)) - d=extract_key_miscnum('a=(2,3)') - self.assertEqual(d['a'],(2,3)) - d=extract_key_miscnum('a=[2,3]') - np.testing.assert_almost_equal(d['a'],[2,3]) - - d=extract_key_miscnum('a=[2,3],b=3,c=(0,)') - np.testing.assert_almost_equal(d['a'],[2,3]) - self.assertEqual(d['b'],3) - self.assertEqual(d['c'],(0,)) - - + return "{:.4e}".format(x) + elif digits==3: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.3f}".format(x) + else: + return "{:.3e}".format(x) + elif digits==2: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.2f}".format(x) + else: + return "{:.2e}".format(x) if __name__ == '__main__': -# TestFitting().test_debug() -# TestFitting().test_gentorque() - -# # Writing example models to file -# a,b,c = 2.0, 3.0, 4.0 -# u_ref,z_ref,alpha=10,12,0.12 -# mu,sigma=0.5,1.2 -# x = np.linspace(0.1,30,20) -# A,k,B=0.5,1.2,10 -# y_exp=expdecay(x,(A,k,B)) -# A, k = 10, 2.3, -# y_weib=weibull_pdf(x,(A,k)) -# y_log=logarithmic(x,(a,b)) -# exponents=[0,3,5] -# y_poly = a + b*x**3 + c*x**5 -# y_power=powerlaw_all(x,(alpha,u_ref,z_ref)) -# y_gauss=gaussian(x,(mu,sigma)) -# M=np.column_stack((x,y_poly,y_power,y_gauss,y_gauss+10,y_weib,y_exp,y_log)) -# np.savetxt('../TestFit.csv',M,header='x,poly,power,gauss,gauss_off,weib,expdecay,log',delimiter=',') -# - unittest.main() + # --- Writing example models to file for pyDatView tests + a,b,c = 2.0, 3.0, 4.0 + u_ref,z_ref,alpha=10,12,0.12 + mu,sigma=0.5,1.2 + x = np.linspace(0.1,30,20) + A,k,B=0.5,1.2,10 + y_exp=expdecay(x,(A,k,B)) + A, k = 10, 2.3, + y_weib=weibull_pdf(x,(A,k)) + y_log=logarithmic(x,(a,b)) + exponents=[0,3,5] + y_poly = a + b*x**3 + c*x**5 + y_power=powerlaw_all(x,(alpha,u_ref,z_ref)) + y_gauss=gaussian(x,(mu,sigma)) + A= 101; B= -200.5; omega = 0.4; phi = np.pi/3 + y_sin=sinusoid(x,(A,omega,phi,B)) + np.random.normal(0, 0.1, len(x)) + M=np.column_stack((x,y_poly,y_power,y_gauss,y_gauss+10,y_weib,y_exp,y_log,y_sin)) + np.savetxt('../TestFit.csv',M,header='x,poly,power,gauss,gauss_off,weib,expdecay,log,sin',delimiter=',') diff --git a/pydatview/tools/damping.py b/pydatview/tools/damping.py index 9127752..0264935 100644 --- a/pydatview/tools/damping.py +++ b/pydatview/tools/damping.py @@ -15,9 +15,9 @@ def indexes(y, thres=0.3, min_dist=1, thres_abs=False): ---------- y : ndarray 
(signed) 1D amplitude data to search for peaks. - thres : float between [0., 1.] - Normalized threshold. Only the peaks with amplitude higher than the + thres : float, defining threshold. Only the peaks with amplitude higher than the threshold will be detected. + if thres_abs is False: between [0., 1.], normalized threshold. min_dist : int Minimum distance between each detected peak. The peak with the highest amplitude is preferred to satisfy this constraint. diff --git a/pydatview/tools/fatigue.py b/pydatview/tools/fatigue.py index 973d142..3c53d9d 100644 --- a/pydatview/tools/fatigue.py +++ b/pydatview/tools/fatigue.py @@ -98,7 +98,7 @@ def rainflow_windap(signal, levels=255., thresshold=(255 / 50)): if np.nanmax(signal) > 0: gain = np.nanmax(signal) / levels signal = signal / gain - signal = np.round(signal).astype(np.int) + signal = np.round(signal).astype(int) # If possible the module is compiled using cython otherwise the python implementation is used @@ -468,7 +468,7 @@ def peak_trough(x, R): #cpdef np.ndarray[long,ndim=1] peak_trough(np.ndarray[lo MINZO = 1 MAXZO = 2 ENDZO = 3 - S = np.zeros(x.shape[0] + 1, dtype=np.int) + S = np.zeros(x.shape[0] + 1, dtype=int) L = x.shape[0] goto = BEGIN diff --git a/pydatview/tools/signal.py b/pydatview/tools/signal.py index 790793b..fd04b0e 100644 --- a/pydatview/tools/signal.py +++ b/pydatview/tools/signal.py @@ -52,6 +52,13 @@ def reject_outliers(y, x=None, m = 2., replaceNaN=True): # --- Resampling # --------------------------------------------------------------------------------{ def multiInterp(x, xp, fp, extrap='bounded'): + """ + Interpolate all the columns of a matrix `fp` based on new values `x` + INPUTS: + - x : array ( n ), new values + - xp : array ( np ), old values + - fp : array ( np x ncol), matrix values to be interpolated + """ j = np.searchsorted(xp, x) - 1 dd = np.zeros(len(x)) bOK = np.logical_and(j>=0, j< len(xp)-1) @@ -274,7 +281,7 @@ def zero_crossings(y,x=None,direction=None): # --------------------------------------------------------------------------------} -# --- +# --- Correlation # --------------------------------------------------------------------------------{ def correlation(x, nMax=80, dt=1, method='manual'): """ @@ -291,13 +298,15 @@ def correlation(x, nMax=80, dt=1, method='manual'): return R, tau -def correlated_signal(coeff, n=1000): +def correlated_signal(coeff, n=1000, seed=None): """ Create a correlated random signal of length `n` based on the correlation coefficient `coeff` value[t] = coeff * value[t-1] + (1-coeff) * random """ if coeff<0 or coeff>1: raise Exception('Correlation coefficient should be between 0 and 1') + if seed is not None: + np.random.seed(seed) x = np.zeros(n) rvec = rand(n) @@ -308,6 +317,217 @@ def correlated_signal(coeff, n=1000): return x +def find_time_offset(t, f, g, outputAll=False): + """ + Find time offset between two signals (may be negative) + + t_offset = find_time_offset(t, f, g) + f(t+t_offset) ~= g(t) + + """ + import scipy + from scipy.signal import correlate + # Remove mean and normalize by std + f = f.copy() + g = g.copy() + f -= f.mean() + g -= g.mean() + f /= f.std() + g /= g.std() + + # Find cross-correlation + xcorr = correlate(f, g) + + # Lags + n = len(f) + dt = t[1]-t[0] + lag = np.arange(1-n, n)*dt + + # Time offset is located at maximum correlation + t_offset = lag[xcorr.argmax()] + + if outputAll: + return t_offset, lag, xcorr + else: + return t_offset + +def sine_approx(t, x, method='least_square'): + """ + Sinusoidal approximation of input signal x + """ 
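+    # The signal is approximated by A*sin(omega*t + phi) + B (the fit is delegated to the
+    # sinusoid fitter of the curve_fitting tools); the offset B is estimated but not returned.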
+ if method=='least_square': + from welib.tools.curve_fitting import fit_sinusoid + y_fit, pfit, fitter = fit_sinusoid(t, x) + omega = fitter.model['coeffs']['omega'] + A = fitter.model['coeffs']['A'] + phi = fitter.model['coeffs']['phi'] + x2 = y_fit + else: + raise NotImplementedError() + + + return x2, omega, A, phi + + +# --------------------------------------------------------------------------------} +# --- Convolution +# --------------------------------------------------------------------------------{ +def convolution_integral(time, f, g): + """ + Compute convolution integral: + f * g = \int 0^t f(tau) g(t-tau) dtau = g * f + For now, only works for uniform time vector, an exception is raised otherwise + """ + dt = time[1]-time[0] + if len(np.unique(np.around(np.diff(time)/dt,3)))>1: + raise Exception('Convolution integral implemented for uniform time vector') + + return np.convolve(f.ravel(), g.ravel() )[:len(time)]*dt + + +# --------------------------------------------------------------------------------} +# --- Intervals/peaks +# --------------------------------------------------------------------------------{ +def intervals(b, min_length=1, forgivingJump=True, removeSmallRel=True, removeSmallFact=0.1, mergeCloseRel=False, mergeCloseFact=0.2): + """ + Describe intervals from a boolean vector where intervals are indicated by True + + INPUT: + - b : a logical vector, where 1 means, I'm in an interval. + - min_length: if provided, do not return intervals of length < min_length + - forgivingJump: if true, merge intervals that are separated by a distance < min_length + - removeSmallRel: remove intervals that have a small length compared to the max length of intervals + - removeSmallFact: factor used for removeSmallRel + - mergeCloseRel: merge intervals that are closer than a fraction of the typical distance between intervals + + OUTPUTS: + - IStart : ending indices + - IEnd : ending indices + - Length: interval lenghts (IEnd-IStart+1) + + IStart, IEnd, Lengths = intervals([False, True, True, False, True, True, True, False]) + np.testing.assert_equal(IStart , np.array([1,4])) + np.testing.assert_equal(IEnd , np.array([2,6])) + np.testing.assert_equal(Lengths, np.array([2,3])) + """ + b = np.asarray(b) + total = np.sum(b) + + min_length=max(min_length,1) + if forgivingJump: + min_jump=min_length + else: + min_jump=1 + + if total==0: + IStart = np.array([]) + IEnd = np.array([]) + Lengths= np.array([]) + return IStart, IEnd, Lengths + elif total==1: + i = np.where(b)[0][0] + IStart = np.array([i]) + IEnd = np.array([i]) + Lengths= np.array([1]) + else: + n = len(b) + Idx = np.arange(n)[b] + delta_Idx=np.diff(Idx) + jumps =np.where(delta_Idx>min_jump)[0] + if len(jumps)==0: + IStart = np.array([Idx[0]]) + IEnd = np.array([Idx[-1]]) + else: + istart=Idx[0] + jumps=np.concatenate(([-1],jumps,[len(Idx)-1])) + IStart = Idx[jumps[:-1]+1] # intervals start right after a jump + IEnd = Idx[jumps[1:]] # intervals stops at jump + Lengths = IEnd-IStart+1 + + # Removing intervals smaller than min_length + bKeep = Lengths>=min_length + IStart = IStart[bKeep] + IEnd = IEnd[bKeep] + Lengths = Lengths[bKeep] + # Removing intervals smaller than less than a fraction of the max interval + if removeSmallRel: + bKeep = Lengths>=removeSmallFact*np.max(Lengths) + IStart = IStart[bKeep] + IEnd = IEnd[bKeep] + Lengths = Lengths[bKeep] + + # Distances between intervals + if mergeCloseRel: + if len(IStart)<=2: + pass + else: + D = IStart[1:]-IEnd[0:-1] + #print('D',D,np.max(D),int(np.max(D) * mergeCloseFact)) + 
min_length = max(int(np.max(D) * mergeCloseFact), min_length) + if min_length<=1: + pass + else: + #print('Readjusting min_length to {} to accomodate for max interval spacing of {:.0f}'.format(min_length, np.mean(D))) + return intervals(b, min_length=min_length, forgivingJump=True, removeSmallRel=removeSmallRel, removeSmallFact=removeSmallFact, mergeCloseRel=False) + return IStart, IEnd, Lengths + +def peaks(x, threshold=0.3, threshold_abs=True, method='intervals', min_length=3, + mergeCloseRel=True, returnIntervals=False): + """ + Find peaks in a signal, above a given threshold + INPUTS: + - x : 1d-array, signal + - threshold : scalar, absolute or relative threshold beyond which peaks are looked for + relative threshold are proportion of the max-min of the signal (between 0-1) + - threshold_abs : boolean, specify whether the threshold is absolute or relative + - method : string, selects which method is used to find the peaks, between: + - 'interval' : one peak per interval above the threshold + - 'derivative': uses derivative to find maxima, may return more than one per interval + - min_length: + - if 'interval' method is used: minimum interval + - if 'derivative' method is used: minimum distance between two peaks + + OPTIONS for interval method: + - mergeCloseRel: logical, if True, attempts to merge intervals that are close to each other compare to the typical interval spacing + set to False if all peaks are wanted + - returnIntervals: logical, if true, return intervals used for interval method + OUTPUTS: + - I : index of the peaks + -[IStart, IEnd] if return intervals is true, see function `intervals` + """ + if not threshold_abs: + threshold = threshold * (np.max(y) - np.min(y)) + np.min(y) + + if method =='intervals': + IStart, IEnd, Lengths = intervals(x>threshold, min_length=min_length, mergeCloseRel=mergeCloseRel) + I = np.array([iS if L==1 else np.argmax(x[iS:iE+1])+iS for iS,iE,L in zip(IStart,IEnd,Lengths)]) + if returnIntervals: + return I, IStart, IEnd + else: + return I + + elif method =='derivative': + I = indexes(x, thres=threshold, thres_abs=True, min_dist=min_length) + return I + else: + raise NotImplementedError('Method {}'.format(method)) + + + +# --------------------------------------------------------------------------------} +# --- Simple signals +# --------------------------------------------------------------------------------{ +def step(time, tStep=0, valueAtStep=0, amplitude=1): + """ + returns a step function: + 0 if ttStep + valueAtStep if t==tStep + """ + return np.heaviside(time-tStep, valueAtStep)*amplitude + + + if __name__=='__main__': import numpy as np import matplotlib.pyplot as plt diff --git a/pydatview/tools/spectral.py b/pydatview/tools/spectral.py index 59471d8..1c0f403 100644 --- a/pydatview/tools/spectral.py +++ b/pydatview/tools/spectral.py @@ -587,7 +587,7 @@ def fnextpow2(x): if nperseg is None: if noverlap is None: overlap_frac=0.5 - elif noverlap is 0: + elif noverlap == 0: overlap_frac=0 else: raise NotImplementedError('TODO noverlap set but not nperseg') From 9c861ac4d94f38c32a78a3993b1efff7698e3ad2 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 11:04:56 -0600 Subject: [PATCH 02/36] Tools: curve fitting on selected window only (Closes #101) --- pydatview/GUITools.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/pydatview/GUITools.py b/pydatview/GUITools.py index 7b90ca7..bf3e33d 100644 --- a/pydatview/GUITools.py +++ b/pydatview/GUITools.py @@ -974,6 +974,10 @@ def 
onCurveFit(self,event=None): Error(self,'Curve fitting tool only works with a single curve. Plot less data.') return PD =self.parent.plotData[0] + ax =self.parent.fig.axes[0] + # Restricting data to axes visible bounds on the x axis + xlim= ax.get_xlim() + b=np.logical_and(PD.x>=xlim[0], PD.x<=xlim[1]) iModel = self.cbModels.GetSelection() d = self.Models[iModel] @@ -998,7 +1002,7 @@ def onCurveFit(self,event=None): #print('>>> Model fit bounds:',bounds ) #print('>>> Model fit kwargs:',fun_kwargs) # Performing fit - y_fit, pfit, fitter = model_fit(sFunc, PD.x, PD.y, p0=p0, bounds=bounds,**fun_kwargs) + y_fit, pfit, fitter = model_fit(sFunc, PD.x[b], PD.y[b], p0=p0, bounds=bounds,**fun_kwargs) formatter = lambda x: pretty_num_short(x, digits=3) formula_num = fitter.formula_num(fmt=formatter) @@ -1019,10 +1023,10 @@ def onCurveFit(self,event=None): # Plot ax=self.parent.fig.axes[0] - ax.plot(PD.x,y_fit,'o', ms=4) + ax.plot(PD.x[b],y_fit,'o', ms=4) self.parent.canvas.draw() - self.x=PD.x + self.x=PD.x[b] self.y_fit=y_fit self.sx=PD.sx self.sy=PD.sy From 5b67ef9a670e7367d94b4243423357fa21f4e645 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 11:06:06 -0600 Subject: [PATCH 03/36] Bug fix: warning list had incorrect variable name in tables --- pydatview/Tables.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydatview/Tables.py b/pydatview/Tables.py index e77525b..adc4b40 100644 --- a/pydatview/Tables.py +++ b/pydatview/Tables.py @@ -52,7 +52,7 @@ def load_tables_from_files(self, filenames=[], fileformat=None, bAdd=False): warnList=[] for f in filenames: if f in self.unique_filenames: - warn.append('Warn: Cannot add a file already opened ' + f) + warnList.append('Warn: Cannot add a file already opened ' + f) elif len(f)==0: pass # warn+= 'Warn: an empty filename was skipped' +'\n' From d34d627ae6e281002749ab99d2f68c91dd94660f Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 11:15:26 -0600 Subject: [PATCH 04/36] GH: starting script --- .github/workflows/tests.yml | 113 ++++++++++++++++++++++++++++++++++++ README.md | 1 + 2 files changed, 114 insertions(+) create mode 100644 .github/workflows/tests.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..2ecaa6f --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,113 @@ + +name: 'Tests' + +on: + push: + + pull_request: + types: [opened, synchronize] #labeled, assigned] + +jobs: + build-and-test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.6, 3.7, 3.8] # 2.7, + + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + submodules: recursive + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install system dependencies + run: sudo apt-get install nsis + + - name: Install python dependencies + run: | + python -m pip install --upgrade pip + pip install -r _tools/travis_requirements.txt + pip install -r weio/requirements.txt + + - name: Repo info + id: repo_info + run: | + echo ::set-output name=SOURCE_NAME::${GITHUB_REF#refs/*/} + echo ::set-output name=SOURCE_BRANCH::${GITHUB_REF#refs/heads/} + echo ::set-output name=SOURCE_TAG::${GITHUB_REF#refs/tags/} + + - name: System info + run: | + echo "SOURCE_NAME: $SOURCE_NAME" + echo "SOURCE_BRANCH: $SOURCE_BRANCH" + echo "SOURCE TAG: $SOURCE_TAG" + echo "Actor : $GITHUB_ACTOR" + echo "Branch: $GITHUB_REF" + pip list + ls + env: + SOURCE_NAME: ${{ 
steps.rep_info.outputs.SOURCE_NAME }} + SOURCE_BRANCH: ${{ steps.rep_info.outputs.SOURCE_BRANCH }} + SOURCE_TAG: ${{ steps.rep_info.outputs.SOURCE_TAG }} + + - name: Tests + run: | + make test + + + - name: Before deploy + if: ${{ matrix.python-version == '3.6'}} + run: | + git fetch --unshallow + export VERSION_NAME=`git describe | sed 's/\(.*\)-.*/\1/'` + export FULL_VERSION_NAME="version $VERSION_NAME" + echo "VERSION_NAME $VERSION_NAME" + echo "FULL_VERSION_NAME $FULL_VERSION_NAME" + if [[ "$SOURCE_TAG" == "" ]]; then export SOURCE_TAG="vdev" ; fi + echo "SOURCE TAG: $SOURCE_TAG" + if [[ "$SOURCE_TAG" == "vdev" ]]; then export VERSION_NAME=$VERSION_NAME"-dev" ; fi + if [[ "$SOURCE_TAG" == "vdev" ]]; then export FULL_VERSION_NAME="latest dev. version $VERSION_NAME" ; fi + echo "VERSION_NAME $VERSION_NAME" + echo "FULL_VERSION_NAME $FULL_VERSION_NAME" + pip install pynsist + pip install distlib + git clone https://github.com/takluyver/pynsist + mv pynsist/nsist nsist + make installer + mv build/nsis/pyDatView.exe "pyDatView_"$VERSION_NAME"_setup.exe" + mv _tools/pyDatView.cmd build/nsis/ + mv _tools/pyDatView.exe build/nsis/ + mv build/nsis build/pyDatView_$VERSION_NAME + cd build && zip -r "../pyDatView_"$VERSION_NAME"_portable.zip" pyDatView_$VERSION_NAME + cd .. + ls + env: + SOURCE_NAME: ${{ steps.rep_info.outputs.SOURCE_NAME }} + SOURCE_BRANCH: ${{ steps.rep_info.outputs.SOURCE_BRANCH }} + SOURCE_TAG: ${{ steps.rep_info.outputs.SOURCE_TAG }} + + - name: Before deploy + if: ${{ matrix.python-version == '3.6'}} + run: | + ls +# deploy: +# provider: releases +# api_key: $GITHUB_TOKEN +# file_glob: true +# overwrite: true +# skip_cleanup: true +# file: +# - pyDatView*.exe +# - pyDatView*.zip +# name: $FULL_VERSION_NAME +# target_commitish: $TRAVIS_COMMIT +# tag_name: $TRAVIS_TAG +# on: +# tags: true +# branch: master +# condition: $TRAVIS_PYTHON_VERSION = 3.6 diff --git a/README.md b/README.md index 4e5c053..ee487fa 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,4 @@ +[![Build status](https://github.com/ebranlard/pyDatView/workflows/Tests/badge.svg)](https://github.com/ebranlard/pyDatView/actions?query=workflow%3A%22Tests%22) [![Build Status](https://travis-ci.com/ebranlard/pyDatView.svg?branch=master)](https://travis-ci.com/ebranlard/pyDatView) Donate just a small amount, buy me a coffee! 
From 7b3b524f310e8adef8b388a327183a31d91f77ba Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 11:53:57 -0600 Subject: [PATCH 05/36] GH: deploy --- .github/workflows/tests.yml | 80 +++++++++++++++++++++++++++---------- 1 file changed, 60 insertions(+), 20 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 2ecaa6f..de6c160 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.6, 3.7, 3.8] # 2.7, + python-version: [3.6] #, 3.7, 3.8] # 2.7, steps: - name: Checkout @@ -39,9 +39,13 @@ jobs: run: | echo ::set-output name=SOURCE_NAME::${GITHUB_REF#refs/*/} echo ::set-output name=SOURCE_BRANCH::${GITHUB_REF#refs/heads/} - echo ::set-output name=SOURCE_TAG::${GITHUB_REF#refs/tags/} + echo ::set-output name=SOURCE_TAG::${GITHUB_REF/refs\/tags\//} - name: System info + env: + SOURCE_NAME: ${{ steps.repo_info.outputs.SOURCE_NAME }} + SOURCE_BRANCH: ${{ steps.repo_info.outputs.SOURCE_BRANCH }} + SOURCE_TAG: ${{ steps.repo_info.outputs.SOURCE_TAG }} run: | echo "SOURCE_NAME: $SOURCE_NAME" echo "SOURCE_BRANCH: $SOURCE_BRANCH" @@ -50,30 +54,58 @@ jobs: echo "Branch: $GITHUB_REF" pip list ls - env: - SOURCE_NAME: ${{ steps.rep_info.outputs.SOURCE_NAME }} - SOURCE_BRANCH: ${{ steps.rep_info.outputs.SOURCE_BRANCH }} - SOURCE_TAG: ${{ steps.rep_info.outputs.SOURCE_TAG }} - name: Tests run: | make test - - - name: Before deploy - if: ${{ matrix.python-version == '3.6'}} + - name: Versioning + id: versioning run: | git fetch --unshallow export VERSION_NAME=`git describe | sed 's/\(.*\)-.*/\1/'` export FULL_VERSION_NAME="version $VERSION_NAME" + echo "VERSION TAG attemps" + export VERSION_TAG=${GITHUB_REF/refs\/tags\//} + if [[ $GITHUB_REF != *"tags" ]]; then export VERSION_TAG="Yep it has tag" ; fi + if [[ $GITHUB_REF != *"tags" ]]; then export VERSION_TAG="" ; fi echo "VERSION_NAME $VERSION_NAME" echo "FULL_VERSION_NAME $FULL_VERSION_NAME" - if [[ "$SOURCE_TAG" == "" ]]; then export SOURCE_TAG="vdev" ; fi - echo "SOURCE TAG: $SOURCE_TAG" - if [[ "$SOURCE_TAG" == "vdev" ]]; then export VERSION_NAME=$VERSION_NAME"-dev" ; fi - if [[ "$SOURCE_TAG" == "vdev" ]]; then export FULL_VERSION_NAME="latest dev. version $VERSION_NAME" ; fi + echo "VERSION_TAG $VERSION_TAG" + if [[ "$VERSION_TAG" == "" ]]; then export VERSION_TAG="vdev" ; fi + if [[ "$VERSION_TAG" == "vdev" ]]; then export VERSION_NAME=$VERSION_NAME"-dev" ; fi + if [[ "$VERSION_TAG" == "vdev" ]]; then export FULL_VERSION_NAME="latest dev. 
version $VERSION_NAME" ; fi + echo "VERSION_NAME $VERSION_NAME" + echo "FULL_VERSION_NAME $FULL_VERSION_NAME" + echo "::set-output name=FULL_VERSION_NAME::$FULL_VERSION_NAME" + echo "::set-output name=VERSION_NAME::$VERSION_NAME" + echo "::set-output name=VERSION_BRANCH::$VERSION_BRANCH" + echo "::set-output name=VERSION_TAG::$VERSION_TAG" + + - name: Test-Versioning + env: + FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} + VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} + VERSION_BRANCH: ${{steps.versioning.outputs.VERSION_BRANCH}} + VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} + run: | + echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" + echo "VERSION_NAME : $VERSION_NAME" + echo "VERSION_BRANCH : $VERSION_BRANCH" + echo "VERSION_TAG : $VERSION_TAG" + + - name: Before deploy + if: ${{ matrix.python-version == '3.6'}} + env: + FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} + VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} + VERSION_BRANCH: ${{steps.versioning.outputs.VERSION_BRANCH}} + VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} + id: before_deploy + run: | echo "VERSION_NAME $VERSION_NAME" echo "FULL_VERSION_NAME $FULL_VERSION_NAME" + echo "VERSION_TAG $VERSION_TAG" pip install pynsist pip install distlib git clone https://github.com/takluyver/pynsist @@ -86,15 +118,23 @@ jobs: cd build && zip -r "../pyDatView_"$VERSION_NAME"_portable.zip" pyDatView_$VERSION_NAME cd .. ls - env: - SOURCE_NAME: ${{ steps.rep_info.outputs.SOURCE_NAME }} - SOURCE_BRANCH: ${{ steps.rep_info.outputs.SOURCE_BRANCH }} - SOURCE_TAG: ${{ steps.rep_info.outputs.SOURCE_TAG }} - - name: Before deploy + - name: Deploy if: ${{ matrix.python-version == '3.6'}} - run: | - ls + env: + FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} + VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} + VERSION_BRANCH: ${{steps.versioning.outputs.VERSION_BRANCH}} + VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: pyDatView_v*.* + release_name: ${{steps.versioning.outputs.FULL_VERSION_NAME}} + tag: vdev + overwrite: true + file_glob: true + # deploy: # provider: releases # api_key: $GITHUB_TOKEN From 2f839a8cac7720ec40e1c9d24502315a9c246bbf Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 12:36:21 -0600 Subject: [PATCH 06/36] GH: cleanup --- .github/workflows/tests.yml | 58 ++++++------------------------------- 1 file changed, 9 insertions(+), 49 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index de6c160..a1259ac 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -34,22 +34,9 @@ jobs: pip install -r _tools/travis_requirements.txt pip install -r weio/requirements.txt - - name: Repo info - id: repo_info - run: | - echo ::set-output name=SOURCE_NAME::${GITHUB_REF#refs/*/} - echo ::set-output name=SOURCE_BRANCH::${GITHUB_REF#refs/heads/} - echo ::set-output name=SOURCE_TAG::${GITHUB_REF/refs\/tags\//} - name: System info - env: - SOURCE_NAME: ${{ steps.repo_info.outputs.SOURCE_NAME }} - SOURCE_BRANCH: ${{ steps.repo_info.outputs.SOURCE_BRANCH }} - SOURCE_TAG: ${{ steps.repo_info.outputs.SOURCE_TAG }} run: | - echo "SOURCE_NAME: $SOURCE_NAME" - echo "SOURCE_BRANCH: $SOURCE_BRANCH" - echo "SOURCE TAG: $SOURCE_TAG" echo "Actor : $GITHUB_ACTOR" echo "Branch: $GITHUB_REF" pip list @@ -65,47 +52,38 @@ jobs: git fetch --unshallow export VERSION_NAME=`git 
describe | sed 's/\(.*\)-.*/\1/'` export FULL_VERSION_NAME="version $VERSION_NAME" - echo "VERSION TAG attemps" - export VERSION_TAG=${GITHUB_REF/refs\/tags\//} - if [[ $GITHUB_REF != *"tags" ]]; then export VERSION_TAG="Yep it has tag" ; fi - if [[ $GITHUB_REF != *"tags" ]]; then export VERSION_TAG="" ; fi + echo "GITHUB_REF: $GITHUB_REF" echo "VERSION_NAME $VERSION_NAME" echo "FULL_VERSION_NAME $FULL_VERSION_NAME" + if [[ $GITHUB_REF == *"tags" ]]; then export VERSION_TAG=${GITHUB_REF/refs\/tags\//} ; fi + if [[ $GITHUB_REF != *"tags" ]]; then export VERSION_TAG="" ; fi echo "VERSION_TAG $VERSION_TAG" if [[ "$VERSION_TAG" == "" ]]; then export VERSION_TAG="vdev" ; fi if [[ "$VERSION_TAG" == "vdev" ]]; then export VERSION_NAME=$VERSION_NAME"-dev" ; fi if [[ "$VERSION_TAG" == "vdev" ]]; then export FULL_VERSION_NAME="latest dev. version $VERSION_NAME" ; fi - echo "VERSION_NAME $VERSION_NAME" - echo "FULL_VERSION_NAME $FULL_VERSION_NAME" + echo "VERSION_NAME: $VERSION_NAME" + echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" echo "::set-output name=FULL_VERSION_NAME::$FULL_VERSION_NAME" echo "::set-output name=VERSION_NAME::$VERSION_NAME" - echo "::set-output name=VERSION_BRANCH::$VERSION_BRANCH" echo "::set-output name=VERSION_TAG::$VERSION_TAG" - name: Test-Versioning env: FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} - VERSION_BRANCH: ${{steps.versioning.outputs.VERSION_BRANCH}} VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} run: | echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" echo "VERSION_NAME : $VERSION_NAME" - echo "VERSION_BRANCH : $VERSION_BRANCH" echo "VERSION_TAG : $VERSION_TAG" - name: Before deploy if: ${{ matrix.python-version == '3.6'}} env: - FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} - VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} - VERSION_BRANCH: ${{steps.versioning.outputs.VERSION_BRANCH}} - VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} + VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} id: before_deploy run: | echo "VERSION_NAME $VERSION_NAME" - echo "FULL_VERSION_NAME $FULL_VERSION_NAME" - echo "VERSION_TAG $VERSION_TAG" pip install pynsist pip install distlib git clone https://github.com/takluyver/pynsist @@ -122,10 +100,9 @@ jobs: - name: Deploy if: ${{ matrix.python-version == '3.6'}} env: - FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} - VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} - VERSION_BRANCH: ${{steps.versioning.outputs.VERSION_BRANCH}} - VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} + FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} + VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} + VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} uses: svenstaro/upload-release-action@v2 with: repo_token: ${{ secrets.GITHUB_TOKEN }} @@ -134,20 +111,3 @@ jobs: tag: vdev overwrite: true file_glob: true - -# deploy: -# provider: releases -# api_key: $GITHUB_TOKEN -# file_glob: true -# overwrite: true -# skip_cleanup: true -# file: -# - pyDatView*.exe -# - pyDatView*.zip -# name: $FULL_VERSION_NAME -# target_commitish: $TRAVIS_COMMIT -# tag_name: $TRAVIS_TAG -# on: -# tags: true -# branch: master -# condition: $TRAVIS_PYTHON_VERSION = 3.6 From eb6ea8ad0749d27906bc086cb1687a6845fd935b Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 12:47:58 -0600 Subject: [PATCH 07/36] GH: controlling deployment (Closes #100) --- .github/workflows/tests.yml | 59 
+++++++++++++++++++++++++------------ 1 file changed, 40 insertions(+), 19 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a1259ac..3a51c28 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -12,9 +12,10 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.6] #, 3.7, 3.8] # 2.7, + python-version: [3.6, 3.8] # 2.7, steps: + # --- Install steps - name: Checkout uses: actions/checkout@v2 with: @@ -25,16 +26,12 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Install system dependencies - run: sudo apt-get install nsis - - name: Install python dependencies run: | python -m pip install --upgrade pip pip install -r _tools/travis_requirements.txt pip install -r weio/requirements.txt - - name: System info run: | echo "Actor : $GITHUB_ACTOR" @@ -42,11 +39,33 @@ jobs: pip list ls + # --- Run Tests - name: Tests run: | make test + # --- Check if Deployment if needed + - name: Check if deployment is needed + id: check_deploy + env: + PY_VERSION: ${{matrix.python-version}} + GH_EVENT : ${{github.event_name}} + run: | + echo "GH_EVENT : $GH_EVENT" + echo "PY_VERSION : $PY_VERSION" + export OK=0 + if [[ $PY_VERSION == "3.6" ]]; then export OK=1 ; fi + if [[ "$OK" == "1" && $GH_EVENT == "push" ]]; then export OK=1 ; fi + echo "DEPLOY : $OK" + echo "::set-output name=GO::$OK" + + # --- Run Deployments + - name: Install system dependencies + if: ${{ steps.check_deploy.outputs.GO == '1'}} + run: sudo apt-get install nsis + - name: Versioning + if: ${{ steps.check_deploy.outputs.GO == '1'}} id: versioning run: | git fetch --unshallow @@ -67,23 +86,17 @@ jobs: echo "::set-output name=VERSION_NAME::$VERSION_NAME" echo "::set-output name=VERSION_TAG::$VERSION_TAG" - - name: Test-Versioning + - name: Before deploy + if: ${{ steps.check_deploy.outputs.GO == '1'}} + id: before_deploy env: FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} run: | - echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" - echo "VERSION_NAME : $VERSION_NAME" - echo "VERSION_TAG : $VERSION_TAG" - - - name: Before deploy - if: ${{ matrix.python-version == '3.6'}} - env: - VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} - id: before_deploy - run: | - echo "VERSION_NAME $VERSION_NAME" + echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" + echo "VERSION_NAME : $VERSION_NAME" + echo "VERSION_TAG : $VERSION_TAG" pip install pynsist pip install distlib git clone https://github.com/takluyver/pynsist @@ -95,10 +108,12 @@ jobs: mv build/nsis build/pyDatView_$VERSION_NAME cd build && zip -r "../pyDatView_"$VERSION_NAME"_portable.zip" pyDatView_$VERSION_NAME cd .. + cp "pyDatView_"$VERSION_NAME"_setup.exe" "pyDatView_LatestVersion_setup.exe" + cp "pyDatView_"$VERSION_NAME"_portable.zip" "pyDatView_LatestVersion_portable.zip" ls - name: Deploy - if: ${{ matrix.python-version == '3.6'}} + if: ${{ steps.check_deploy.outputs.GO == '1'}} env: FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} @@ -106,8 +121,14 @@ jobs: uses: svenstaro/upload-release-action@v2 with: repo_token: ${{ secrets.GITHUB_TOKEN }} - file: pyDatView_v*.* + file: pyDatView_*.* release_name: ${{steps.versioning.outputs.FULL_VERSION_NAME}} tag: vdev overwrite: true file_glob: true + body: | + Different development versions are found in the "Assets" below. 
+ + Select the one with the highest number to get the latest development version. + + Use a file labelled "setup" for a windows installer. No admin right is required for this installation, but the application is not signed. You may use a file labelled "portable" for a self contained zip files. From 597303d5e335603f26f144afdbb4654e480fa934 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 13:51:36 -0600 Subject: [PATCH 08/36] GH: removed travis config (See #100) --- .travis.yml | 58 ----------------------------------------------------- 1 file changed, 58 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 962d72d..0000000 --- a/.travis.yml +++ /dev/null @@ -1,58 +0,0 @@ -# Travis-CI file for pyDatView -language: python - -python: - - "2.7" - - "3.6" - - "3.7" - - "3.8" -os: - - linux -sudo: true - -install: -# - sudo apt-get install python-wxgtk3.0 - - sudo apt-get install nsis - - pip install -r _tools/travis_requirements.txt - - pip install -r weio/requirements.txt - -script: - - make test - - if [[ "$TRAVIS_TAG" == "" ]]; then export TRAVIS_TAG="vdev" ; fi - -before_deploy: - - git fetch --unshallow - - export VERSION_NAME=`git describe | sed 's/\(.*\)-.*/\1/'` - - export FULL_VERSION_NAME="version $VERSION_NAME" - - if [[ "$TRAVIS_TAG" == "vdev" ]]; then export VERSION_NAME=$VERSION_NAME"-dev" ; fi - - if [[ "$TRAVIS_TAG" == "vdev" ]]; then export FULL_VERSION_NAME="latest dev. version $VERSION_NAME" ; fi - - echo $VERSION_NAME - - pip install pynsist - - pip install distlib - - git clone https://github.com/takluyver/pynsist - - mv pynsist/nsist nsist - - make installer - - mv build/nsis/pyDatView.exe "pyDatView_"$VERSION_NAME"_setup.exe" - - mv _tools/pyDatView.cmd build/nsis/ - - mv _tools/pyDatView.exe build/nsis/ - - mv build/nsis build/pyDatView_$VERSION_NAME - - cd build && zip -r "../pyDatView_"$VERSION_NAME"_portable.zip" pyDatView_$VERSION_NAME - - cd .. 
- - ls - -deploy: - provider: releases - api_key: $GITHUB_TOKEN - file_glob: true - overwrite: true - skip_cleanup: true - file: - - pyDatView*.exe - - pyDatView*.zip - name: $FULL_VERSION_NAME - target_commitish: $TRAVIS_COMMIT - tag_name: $TRAVIS_TAG - on: - tags: true - branch: master - condition: $TRAVIS_PYTHON_VERSION = 3.6 From 1889d5cf16ab1e814c4b21cd44bdbebfa53bcdd7 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 13:53:12 -0600 Subject: [PATCH 09/36] weio: misc updates --- weio | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/weio b/weio index b1addf6..0527c0f 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit b1addf69acf817ec42de62711dd8dff94c8e9f4b +Subproject commit 0527c0f864895f9557e6f4ff7decd4eeccc0643c From 06873e3b107029ee26c8f2c755325e670eede6a8 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 14:30:29 -0600 Subject: [PATCH 10/36] Fix panning/zoom switch for latest matplotlib, backward compability to check (see #98) --- pydatview/GUIToolBox.py | 52 ++++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 22 deletions(-) diff --git a/pydatview/GUIToolBox.py b/pydatview/GUIToolBox.py index 4606d36..a32141e 100644 --- a/pydatview/GUIToolBox.py +++ b/pydatview/GUIToolBox.py @@ -195,6 +195,11 @@ def _update(self,currentaxes=None): class MyNavigationToolbar2Wx(NavigationToolbar2Wx): + """ + Wrapped version of the Navigation toolbar from WX with the following features: + - Tools can be removed, if not in `keep_tools` + - Zoom is set by default, and the toggling between zoom and pan is handled internally + """ def __init__(self, canvas, keep_tools): # Taken from matplotlib/backend_wx.py but added style: self.VERSION = matplotlib.__version__ @@ -215,6 +220,8 @@ def __init__(self, canvas, keep_tools): else: NavigationToolbar2Wx.__init__(self, canvas) + self.pan_on=False + # Make sure we start in zoom mode if 'Pan' in keep_tools: self.zoom() # NOTE: #22 BREAK cursors #12! 
@@ -225,32 +232,33 @@ def __init__(self, canvas, keep_tools): if t.GetLabel() not in keep_tools: self.DeleteToolByPos(i) - def press_zoom(self, event): - NavigationToolbar2Wx.press_zoom(self,event) - #self.SetToolBitmapSize((22,22)) - - def press_pan(self, event): - NavigationToolbar2Wx.press_pan(self,event) - def zoom(self, *args): - NavigationToolbar2Wx.zoom(self,*args) + # NEW - MPL>=3.0.0 + if self.pan_on: + pass + else: + NavigationToolbar2.zoom(self,*args) # We skip wx and use the parent + # BEFORE + #NavigationToolbar2Wx.zoom(self,*args) def pan(self, *args): - try: - #if self.VERSION[0]=='2' or self.VERSION[0]=='1': - isPan = self._active=='PAN' - except: - try: - from matplotlib.backend_bases import _Mode - isPan = self.mode == _Mode.PAN - except: - raise Exception('Pan not found, report a pyDatView bug, with matplotlib version.') - if isPan: - NavigationToolbar2Wx.pan(self,*args) + self.pan_on=not self.pan_on + # NEW - MPL >= 3.0.0 + NavigationToolbar2.pan(self, *args) # We skip wx and use to parent + if not self.pan_on: self.zoom() - else: - NavigationToolbar2Wx.pan(self,*args) - + # BEFORE + #try: + # isPan = self._active=='PAN' + #except: + # try: + # from matplotlib.backend_bases import _Mode + # isPan = self.mode == _Mode.PAN + # except: + # raise Exception('Pan not found, report a pyDatView bug, with matplotlib version.') + #NavigationToolbar2Wx.pan(self,*args) + #if isPan: + # self.zoom() def home(self, *args): """Restore the original view.""" From 66e7c1f782a7905a700e87879d0ae31a14567847 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sat, 31 Jul 2021 14:49:25 -0600 Subject: [PATCH 11/36] Temporary fix, discard axAssertionError with 0 bitmap on macos --- pydatview/main.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pydatview/main.py b/pydatview/main.py index 07eedce..6e55868 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -604,8 +604,13 @@ def MyExceptionHook(etype, value, trace): :param string `trace`: the traceback header, if any (otherwise, it prints the standard Python header: ``Traceback (most recent call last)``. 
""" + from wx._core import wxAssertionError # Printing exception traceback.print_exception(etype, value, trace) + if etype==wxAssertionError: + if wx.Platform == '__WXMAC__': + # We skip these exceptions on macos (likely bitmap size 0) + return # Then showing to user the last error frame = wx.GetApp().GetTopWindow() tmp = traceback.format_exception(etype, value, trace) From 727fdee1b6e32d91c68fdd13c0babb810a3de53e Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Sun, 1 Aug 2021 20:03:55 -0600 Subject: [PATCH 12/36] Tools: curve fitting for 2nd order system --- pydatview/tools/curve_fitting.py | 217 +++++++++++++++++++++++++++++-- 1 file changed, 209 insertions(+), 8 deletions(-) diff --git a/pydatview/tools/curve_fitting.py b/pydatview/tools/curve_fitting.py index dc5e9c8..69d6d34 100644 --- a/pydatview/tools/curve_fitting.py +++ b/pydatview/tools/curve_fitting.py @@ -234,6 +234,33 @@ def sinusoid_f(x, p): """ p = (A,f,phi_deg,B) """ return p[0]*np.sin(2*pi*(p[1]*x+p[2]/360)) + p[3] + + +def secondorder_impulse(t, p): + """ p = (A, omega0, zeta, B, t0) """ + A, omega0, zeta, B, t0 = p + omegad = omega0 * sqrt(1-zeta**2) + phi = np.arctan2(zeta, sqrt(1-zeta**2)) + x = np.zeros(t.shape) + bp = t>=t0 + t = t[bp]-t0 + x[bp] += A * sin(omegad * t) * exp(-zeta * omega0 * t) + x+=B + return x + +def secondorder_step(t, p): + """ p = (A, omega0, zeta, B, t0) """ + A, omega0, zeta, B, t0 = p + omegad = omega0 * sqrt(1-zeta**2) + phi = np.arctan2(zeta, sqrt(1-zeta**2)) + x = np.zeros(t.shape) + bp = t>=t0 + t = t[bp]-t0 + x[bp] += A * ( 1- exp(-zeta*omega0 *t)/sqrt(1-zeta**2) * cos(omegad*t - phi)) + x+=B + return x + + def gentorque(x, p): """ INPUTS: @@ -300,6 +327,17 @@ def gentorque(x, p): 'coeffs' :'a=1, b=0', # Order Important 'consts' :None, 'bounds' :None}, +{'label':'2nd order impulse/decay (manual)', 'handle': secondorder_impulse, 'id':'predef: secondorder_impulse', +'formula':'{A}*exp(-{zeta}*{omega}*(x-{x0})) * sin({omega}*sqrt(1-{zeta}**2))) +{B}', +'coeffs' :'A=1, omega=1, zeta=0.001, B=0, x0=0', # Order Important +'consts' :None, +'bounds' :'A=(-inf,inf), omega=(0,100), zeta=(0,1), B=(-inf,inf), x0=(-inf,inf)'}, +{'label':'2nd order step (manual)', 'handle': secondorder_step, 'id':'predef: secondorder_step', +'formula':'{A}*(1-exp(-{zeta}*{omega}*(x-{x0}))/sqrt(1-{zeta}**2) * cos({omega}*sqrt(1-{zeta}**2)-arctan({zeta}/sqrt(1-{zeta}**2)))) +{B}', +'coeffs' :'A=1, omega=1, zeta=0.001, B=0, x0=0', # Order Important +'consts' :None, +'bounds' :'A=(-inf,inf), omega=(0,100), zeta=(0,1), B=(-inf,inf), x0=(-inf,inf)'}, + # --- Wind Energy {'label':'Power law (alpha)', 'handle':powerlaw_alpha, 'id':'predef: powerlaw_alpha', 'formula':'{u_ref} * (z / {z_ref}) ** {alpha}', @@ -729,6 +767,16 @@ def plot(self, x=None, fig=None, ax=None): #ax.set_ylabel('') return fig,ax + def print_guessbounds(self): + s='' + p0 = self.model['coeffs_init'] + bounds = self.model['bounds'] + for i,(k,v) in enumerate(self.model['coeffs'].items()): + print( (pretty_num(bounds[0][i]),pretty_num(p0[i]), pretty_num(bounds[1][i])) ) + s+='{:15s}: {:10s} < {:10s} < {:10s}\n'.format(k, pretty_num(bounds[0][i]),pretty_num(p0[i]), pretty_num(bounds[1][i])) + print(s) + + def __repr__(self): s='<{} object> with fields:\n'.format(type(self).__name__) s+=' - data, dictionary with keys: \n' @@ -740,6 +788,146 @@ def __repr__(self): return s +# --------------------------------------------------------------------------------} +# --- Wrapper for predefined fitters +# 
--------------------------------------------------------------------------------{ +class PredefinedModelFitter(ModelFitter): + def __init__(self, x=None, y=None, p0=None, bounds=None, **kwargs): + ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) # NOTE: not passing data + + self.kwargs=kwargs + + if x is not None and y is not None: + self.fit_data(x,y,p0,bounds) + + def setup_model(self): + """ + Setup model: + - guess/coeffs_init: return params in format needed for curve_fit (p0,p1,p2,p3) + - bound : bounds in format needed for curve_fit ((low0,low1,low2), (high0, high1)) + - coeffs : OrderedDict, necessary for user print + - formula : necessary for user print + """ + #self.model['coeffs'] = OrderedDict([(var,1) for i,var in enumerate(variables)]) + #self.model['formula'] = '' + #self.model['coeffs_init']=p_guess + #self.model['bounds']=bounds_guess + raise NotImplementedError('To be implemented by child class') + + def model_function(self, x, p): + raise NotImplementedError('To be implemented by child class') + + def fit_data(self, x, y, p0=None, bounds=None): + # Cleaning data + x,y=self.clean_data(x,y) + + # --- setup model + # guess initial parameters, potential bounds, and set necessary data + self.setup_model() + + # --- Minimization + minimize_me = lambda x, *p : self.model_function(x, p) + if self.model['bounds'] is None: + pfit, pcov = so.curve_fit(minimize_me, x, y, p0=self.model['coeffs_init']) + else: + pfit, pcov = so.curve_fit(minimize_me, x, y, p0=self.model['coeffs_init'], bounds=self.model['bounds']) + # --- Reporting information about the fit (after the fit) + # And Return a fitted function + y_fit = self.model_function(x, pfit) + self.model['fitted_function']=lambda xx : self.model_function(xx, pfit) + self.store_fit_info(y_fit, pfit) + + def plot_guess(self, x=None, fig=None, ax=None): + """ plotthe guess values""" + if x is None: + x=self.data['x'] + import matplotlib.pyplot as plt + if fig is None: + fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) + fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) + + p_guess = self.model['coeffs_init'] + + ax.plot(self.data['x'], self.data['y'] , '.', label='Data') + ax.plot(x, self.model_function(x,p_guess), '-', label='Model at guessed parameters') + ax.legend() + + +# --------------------------------------------------------------------------------} +# --- Predefined fitters +# --------------------------------------------------------------------------------{ +class SecondOrderFitterImpulse(PredefinedModelFitter): + + def model_function(self, x, p): + return secondorder_impulse(x, p) + + def setup_model(self): + """ p = (A, omega0, zeta, B, t0) """ + self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('zeta',0.01),('B',0),('t0',0)]) + self.model['formula'] = '{A}*exp(-{zeta}*{omega}*(x-{x0}))*sin({omega}*sqrt(1-{zeta}**2)))+{B}' + + # --- Guess Initial values + x, y = self.data['x'],self.data['y'] + # TODO use signal + dt = x[1]-x[0] + omega0 = main_frequency(x,y) + A = np.max(y) - np.min(y) + B = np.mean(y) + zeta = 0.1 + y_start = y[0]+0.01*A + bDeviate = np.argwhere(abs(y-y_start)>abs(y_start-y[0]))[0] + t0 = x[bDeviate[0]] + p_guess = np.array([A, omega0, zeta, B, t0]) + self.model['coeffs_init'] = p_guess + # --- Set Bounds + T = x[-1]-x[0] + dt = x[1]-x[0] + om_min = 2*np.pi/T/2 + om_max = 2*np.pi/dt/2 + b_A = (A*0.1,A*3) + b_om = (om_min,om_max) + b_zeta = (0,1) + b_B = (np.min(y),np.max(y)) + b_x0 = (np.min(x),np.max(x)) + 
self.model['bounds'] = ((b_A[0],b_om[0],b_zeta[0],b_B[0],b_x0[0]),(b_A[1],b_om[1],b_zeta[1],b_B[1],b_x0[1])) + #self.plot_guess(); import matplotlib.pyplot as plt; plt.show() + #self.print_guessbounds(); + +class SecondOrderFitterStep(PredefinedModelFitter): + + def model_function(self, x, p): + return secondorder_step(x, p) + + def setup_model(self): + """ p = (A, omega0, zeta, B, t0) """ + self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('zeta',0.01),('B',0),('t0',0)]) + self.model['formula'] ='{A}*(1-exp(-{zeta}*{omega}*(x-{x0}))/sqrt(1-{zeta}**2) * cos({omega}*sqrt(1-{zeta}**2)-arctan({zeta}/sqrt(1-{zeta}**2)))) +{B}' + # --- Guess Initial values + x, y = self.data['x'],self.data['y'] + # TODO use signal + omega0 = main_frequency(x,y) + A = np.max(y) - np.min(y) + B = y[0] + zeta = 0.1 + y_start = y[0]+0.01*A + bDeviate = np.argwhere(abs(y-y_start)>abs(y_start-y[0]))[0] + t0 = x[bDeviate[0]] + p_guess = np.array([A, omega0, zeta, B, t0]) + self.model['coeffs_init'] = p_guess + # --- Set Bounds + T = x[-1]-x[0] + dt = x[1]-x[0] + om_min = 2*np.pi/T/2 + om_max = 2*np.pi/dt/2 + b_A = (A*0.1,A*3) + b_om = (om_min,om_max) + b_zeta = (0,1) + b_B = (np.min(y),np.max(y)) + b_x0 = (np.min(x),np.max(x)) + self.model['bounds'] = ((b_A[0],b_om[0],b_zeta[0],b_B[0],b_x0[0]),(b_A[1],b_om[1],b_zeta[1],b_B[1],b_x0[1])) + #self.plot_guess(); import matplotlib.pyplot as plt; plt.show() + #self.print_guessbounds(); + # --------------------------------------------------------------------------------} # --- Predefined fitter # --------------------------------------------------------------------------------{ @@ -853,14 +1041,8 @@ def fit_data(self, x, y, p0=None, bounds=None): # Cleaning data x,y=self.clean_data(x,y) - nParams=4 - # TODO use signal - dt = x[1]-x[0] - ff = np.fft.fftfreq(len(x), (dt)) # assume uniform spacing - Fyy = abs(np.fft.fft(y)) - guess_freq = abs(ff[np.argmax(Fyy[1:])+1]) # excluding the zero frequency "peak", which is related to offset - + guess_freq= main_frequency(x,y)/(2*np.pi) # [Hz] guess_amp = np.std(y) * 2.**0.5 guess_offset = np.mean(y) if self.physical: @@ -884,6 +1066,7 @@ def fit_data(self, x, y, p0=None, bounds=None): self.store_fit_info(y_fit, pfit) + class GeneratorTorqueFitter(ModelFitter): def __init__(self,x=None, y=None, p0=None, bounds=None): ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) @@ -982,6 +1165,7 @@ def minimize_me(p): +# --- Registering FITTERS. 
The formula info is redundant, only used by pyDatView, should be removed FITTERS= [ {'label':'Polynomial (full)' ,'id':'fitter: polynomial_continuous', 'handle': ContinuousPolynomialFitter, 'consts':{'order':3}, 'formula': '{a_i} x^i'}, @@ -989,6 +1173,10 @@ def minimize_me(p): 'consts':{'exponents':[0,2,3]},'formula': '{a_i} x^j'}, {'label':'Sinusoid','id':'fitter: sinusoid' , 'handle': SinusoidFitter , 'consts':{'physical':True},'formula': '{A}*sin({omega or 2 pi f}*x+{phi or phi_deg}) + {B} '}, +{'label':'2nd order impulse/decay (auto)','id':'fitter: secondorder_impulse', 'handle': SecondOrderFitterImpulse , +'consts':{},'formula': '{A}*exp(-{zeta}*{omega}*(x-{x0}))*sin({omega}*sqrt(1-{zeta}**2)))+{B}'}, +{'label':'2nd order step (auto)','id':'fitter: secondorder_step', 'handle': SecondOrderFitterStep , +'consts':{},'formula':'{A}*(1-exp(-{zeta}*{omega}*(x-{x0}))/sqrt(1-{zeta}**2)*cos({omega}*sqrt(1-{zeta}**2)-arctan({zeta}/sqrt(1-{zeta}**2))))+{B}'}, # {'label':'Generator Torque','id':'fitter: gentorque' , 'handle': GeneratorTorqueFitter , # 'consts':{},'formula': ''} ] @@ -1092,6 +1280,7 @@ def _clean_formula(s, latex=False): s = s.replace('phi',r'\phi') s = s.replace('alpha',r'\alpha') s = s.replace('beta' ,r'\alpha') + s = s.replace('zeta' ,r'\zeta') s = s.replace('mu' ,r'\mu' ) s = s.replace('pi' ,r'\pi' ) s = s.replace('sigma',r'\sigma') @@ -1109,6 +1298,18 @@ def _clean_formula(s, latex=False): s = s.replace('{','').replace('}','') return s + +def main_frequency(t,y): + """ + Returns main frequency of a signal + NOTE: this tool below to welib.tools.signal, but put here for convenience + """ + dt = t[1]-t[0] # assume uniform spacing of time and frequency + om = np.fft.fftfreq(len(t), (dt))*2*np.pi + Fyy = abs(np.fft.fft(y)) + omega = abs(om[np.argmax(Fyy[1:])+1]) # exclude the zero frequency (mean) + return omega + def rsquare(y, f): """ Compute coefficient of determination of data fit model and RMSE [r2] = rsquare(y,f) @@ -1131,7 +1332,7 @@ def rsquare(y, f): return R2 def pretty_param(s): - if s in ['alpha','beta','delta','gamma','epsilon','lambda','mu','nu','pi','rho','sigma','phi','psi','omega']: + if s in ['alpha','beta','delta','gamma','epsilon','zeta','lambda','mu','nu','pi','rho','sigma','phi','psi','omega']: s = r'\{}'.format(s) s = s.replace('_ref',r'_{ref}') # make this general.. 
return s From af5e700b8de6d9cd971122f2b6e4276ae2ce59a6 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Wed, 17 Nov 2021 18:09:06 -0700 Subject: [PATCH 13/36] Adding raaw mat file --- installer.cfg | 215 +++++++++++++++++++++++++------------------------- weio | 2 +- 2 files changed, 110 insertions(+), 107 deletions(-) diff --git a/installer.cfg b/installer.cfg index f2b40b5..ca5eba3 100644 --- a/installer.cfg +++ b/installer.cfg @@ -1,106 +1,109 @@ -[Application] -name=pyDatView -version=0.2 -entry_point=pydatview:show - -#[Command pydatview] -#entry_point=pydatview:cmdline - -[Python] -version=3.6.0 -bitness=64 - -[Include] -pypi_wheels = - numpy==1.19.3 - wxPython==4.0.3 - matplotlib==3.0.0 - pyparsing==2.2.2 - cycler==0.10.0 - six==1.11.0 - python-dateutil==2.7.3 - kiwisolver==1.0.1 - pandas==0.23.4 - pytz==2018.5 - chardet==3.0.4 - scipy==1.1.0 - pyarrow==4.0.1 - -# numpy==1.20.3 -# wxPython==4.0.7 -# matplotlib==3.4.2 -# pyparsing==2.4.7 -# cycler==0.10.0 -# six==1.11.0 -# python-dateutil==2.7.3 -# kiwisolver==1.0.1 -# pandas==1.1.5 -# pytz==2018.5 -# chardet==3.0.4 -# scipy==1.5.4 - -# PyYAML==5.1.2 - -packages=weio - future - - -exclude=weio/.git* - weio/tests - pkgs/weio/examples - pkgs/weio/weio/tests - pkgs/numpy/core/include - pkgs/numpy/doc - pkgs/numpy/f2py - pkgs/numpy/core/lib - pkgs/numpy/tests - pkgs/numpy/*/tests - pkgs/pyarrow/include - pkgs/pyarrow/includes - pkgs/pyarrow/tensorflow - pkgs/pyarrow/tests - pkgs/pandas/tests - pkgs/matplotlib/sphinxext - pkgs/matplotlib/testing - pkgs/matplotlib/mpl-data/sample_data - pkgs/matplotlib/mpl-data/fonts - pkgs/matplotlib/mpl-data/images/*.pdf - pkgs/matplotlib/mpl-data/images/*.svg - pkgs/matplotlib/mpl-data/images/*.ppm - pkgs/matplotlib/mpl-data/stylelib/seaborn*.mplstyle - pkgs/matplotlib/backends/qt_editor - pkgs/matplotlib/backends/web_backend - pkgs/wx/locale - pkgs/wx/py - pkgs/wx/lib/agw - pkgs/wx/lib/analogclock - pkgs/wx/lib/art - pkgs/wx/lib/colourchooser - pkgs/wx/lib/editor - pkgs/wx/lib/floatcanvas - pkgs/wx/lib/gizmos - pkgs/wx/lib/masked - pkgs/wx/lib/ogl - pkgs/wx/lib/pdfviewer - pkgs/wx/lib/plot - pkgs/wx/lib/pubsub - pkgs/wx/lib/wxcairo - pkgs/scipy/cluster - pkgs/scipy/constants - pkgs/scipy/fftpack - pkgs/scipy/io - pkgs/scipy/ndimage - pkgs/scipy/odr - pkgs/scipy/signal - pkgs/scipy/extra-dll/libbanded* - pkgs/scipy/extra-dll/libd_odr* - pkgs/scipy/extra-dll/libdcosqb* - pkgs/scipy/extra-dll/libdfft_sub* - pkgs/scipy/*/tests - -# pkgs\matplotlib\mpl-data -##Click==7.0 - -[Build] -#directory= -installer_name=pyDatView.exe +[Application] +name=pyDatView +version=0.2 +entry_point=pydatview:show + +#[Command pydatview] +#entry_point=pydatview:cmdline + +[Python] +version=3.6.0 +bitness=64 + +[Include] +pypi_wheels = + numpy==1.19.3 + wxPython==4.0.3 + matplotlib==3.0.0 + pyparsing==2.2.2 + cycler==0.10.0 + six==1.11.0 + python-dateutil==2.7.3 + kiwisolver==1.0.1 + pandas==0.23.4 + pytz==2018.5 + chardet==3.0.4 + scipy==1.1.0 + pyarrow==4.0.1 + +# numpy==1.20.3 +# wxPython==4.0.7 +# matplotlib==3.4.2 +# pyparsing==2.4.7 +# cycler==0.10.0 +# six==1.11.0 +# python-dateutil==2.7.3 +# kiwisolver==1.0.1 +# pandas==1.1.5 +# pytz==2018.5 +# chardet==3.0.4 +# scipy==1.5.4 + +# PyYAML==5.1.2 + +packages=weio + future + + +exclude=weio/.git* + weio/tests + pkgs/weio/examples + pkgs/weio/weio/tests + pkgs/numpy/core/include + pkgs/numpy/doc + pkgs/numpy/f2py + pkgs/numpy/core/lib + pkgs/numpy/tests + pkgs/numpy/*/tests + pkgs/pyarrow/include + pkgs/pyarrow/includes + pkgs/pyarrow/tensorflow + pkgs/pyarrow/tests + 
pkgs/pandas/tests + pkgs/matplotlib/sphinxext + pkgs/matplotlib/testing + pkgs/matplotlib/mpl-data/sample_data + pkgs/matplotlib/mpl-data/fonts + pkgs/matplotlib/mpl-data/images/*.pdf + pkgs/matplotlib/mpl-data/images/*.svg + pkgs/matplotlib/mpl-data/images/*.ppm + pkgs/matplotlib/mpl-data/stylelib/seaborn*.mplstyle + pkgs/matplotlib/backends/qt_editor + pkgs/matplotlib/backends/web_backend + pkgs/wx/locale + pkgs/wx/py + pkgs/wx/lib/agw + pkgs/wx/lib/analogclock + pkgs/wx/lib/art + pkgs/wx/lib/colourchooser + pkgs/wx/lib/editor + pkgs/wx/lib/floatcanvas + pkgs/wx/lib/gizmos + pkgs/wx/lib/masked + pkgs/wx/lib/ogl + pkgs/wx/lib/pdfviewer + pkgs/wx/lib/plot + pkgs/wx/lib/pubsub + pkgs/wx/lib/wxcairo + pkgs/scipy/cluster + pkgs/scipy/constants + pkgs/scipy/fftpack + pkgs/scipy/io/tests + pkgs/scipy/io/arff + pkgs/scipy/io/matlab/tests + pkgs/scipy/io/harwell_boieng/tests + pkgs/scipy/ndimage + pkgs/scipy/odr + pkgs/scipy/signal + pkgs/scipy/extra-dll/libbanded* + pkgs/scipy/extra-dll/libd_odr* + pkgs/scipy/extra-dll/libdcosqb* + pkgs/scipy/extra-dll/libdfft_sub* + pkgs/scipy/*/tests + +# pkgs\matplotlib\mpl-data +##Click==7.0 + +[Build] +#directory= +installer_name=pyDatView.exe diff --git a/weio b/weio index 0527c0f..a782094 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit 0527c0f864895f9557e6f4ff7decd4eeccc0643c +Subproject commit a782094d3f14d5d806111190b3271c63dc77be26 From 66106f6746cd7cdd50bc6a7463c0d244712f04d7 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Thu, 18 Nov 2021 12:57:58 -0700 Subject: [PATCH 14/36] Update of weio --- weio | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/weio b/weio index a782094..1a04e7a 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit a782094d3f14d5d806111190b3271c63dc77be26 +Subproject commit 1a04e7aaa825669bb020bf63b8e805991682c4be From f6d1e74e67f9edd39f68a69eecc24efad20cea15 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Thu, 18 Nov 2021 12:59:54 -0700 Subject: [PATCH 15/36] Update of executable launcher for file association (Closes #102) --- .github/workflows/tests.yml | 269 ++++++++++----------- README.md | 454 ++++++++++++++++++++---------------- _tools/Makefile | 25 +- _tools/pyDatView.c | 329 +++++++++++++++++++------- _tools/pyDatView.exe | Bin 122368 -> 299008 bytes _tools/pyDatView.rc | 1 + _tools/pyDatView_Test.bat | 4 + installer.cfg | 8 +- ressources/pyDatView.ico | Bin 0 -> 140686 bytes 9 files changed, 655 insertions(+), 435 deletions(-) create mode 100644 _tools/pyDatView.rc create mode 100644 _tools/pyDatView_Test.bat create mode 100644 ressources/pyDatView.ico diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3a51c28..09d5540 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,134 +1,135 @@ - -name: 'Tests' - -on: - push: - - pull_request: - types: [opened, synchronize] #labeled, assigned] - -jobs: - build-and-test: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.6, 3.8] # 2.7, - - steps: - # --- Install steps - - name: Checkout - uses: actions/checkout@v2 - with: - submodules: recursive - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - - name: Install python dependencies - run: | - python -m pip install --upgrade pip - pip install -r _tools/travis_requirements.txt - pip install -r weio/requirements.txt - - - name: System info - run: | - echo "Actor : $GITHUB_ACTOR" - echo "Branch: $GITHUB_REF" - pip list - ls - 
- # --- Run Tests - - name: Tests - run: | - make test - - # --- Check if Deployment if needed - - name: Check if deployment is needed - id: check_deploy - env: - PY_VERSION: ${{matrix.python-version}} - GH_EVENT : ${{github.event_name}} - run: | - echo "GH_EVENT : $GH_EVENT" - echo "PY_VERSION : $PY_VERSION" - export OK=0 - if [[ $PY_VERSION == "3.6" ]]; then export OK=1 ; fi - if [[ "$OK" == "1" && $GH_EVENT == "push" ]]; then export OK=1 ; fi - echo "DEPLOY : $OK" - echo "::set-output name=GO::$OK" - - # --- Run Deployments - - name: Install system dependencies - if: ${{ steps.check_deploy.outputs.GO == '1'}} - run: sudo apt-get install nsis - - - name: Versioning - if: ${{ steps.check_deploy.outputs.GO == '1'}} - id: versioning - run: | - git fetch --unshallow - export VERSION_NAME=`git describe | sed 's/\(.*\)-.*/\1/'` - export FULL_VERSION_NAME="version $VERSION_NAME" - echo "GITHUB_REF: $GITHUB_REF" - echo "VERSION_NAME $VERSION_NAME" - echo "FULL_VERSION_NAME $FULL_VERSION_NAME" - if [[ $GITHUB_REF == *"tags" ]]; then export VERSION_TAG=${GITHUB_REF/refs\/tags\//} ; fi - if [[ $GITHUB_REF != *"tags" ]]; then export VERSION_TAG="" ; fi - echo "VERSION_TAG $VERSION_TAG" - if [[ "$VERSION_TAG" == "" ]]; then export VERSION_TAG="vdev" ; fi - if [[ "$VERSION_TAG" == "vdev" ]]; then export VERSION_NAME=$VERSION_NAME"-dev" ; fi - if [[ "$VERSION_TAG" == "vdev" ]]; then export FULL_VERSION_NAME="latest dev. version $VERSION_NAME" ; fi - echo "VERSION_NAME: $VERSION_NAME" - echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" - echo "::set-output name=FULL_VERSION_NAME::$FULL_VERSION_NAME" - echo "::set-output name=VERSION_NAME::$VERSION_NAME" - echo "::set-output name=VERSION_TAG::$VERSION_TAG" - - - name: Before deploy - if: ${{ steps.check_deploy.outputs.GO == '1'}} - id: before_deploy - env: - FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} - VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} - VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} - run: | - echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" - echo "VERSION_NAME : $VERSION_NAME" - echo "VERSION_TAG : $VERSION_TAG" - pip install pynsist - pip install distlib - git clone https://github.com/takluyver/pynsist - mv pynsist/nsist nsist - make installer - mv build/nsis/pyDatView.exe "pyDatView_"$VERSION_NAME"_setup.exe" - mv _tools/pyDatView.cmd build/nsis/ - mv _tools/pyDatView.exe build/nsis/ - mv build/nsis build/pyDatView_$VERSION_NAME - cd build && zip -r "../pyDatView_"$VERSION_NAME"_portable.zip" pyDatView_$VERSION_NAME - cd .. - cp "pyDatView_"$VERSION_NAME"_setup.exe" "pyDatView_LatestVersion_setup.exe" - cp "pyDatView_"$VERSION_NAME"_portable.zip" "pyDatView_LatestVersion_portable.zip" - ls - - - name: Deploy - if: ${{ steps.check_deploy.outputs.GO == '1'}} - env: - FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} - VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} - VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: pyDatView_*.* - release_name: ${{steps.versioning.outputs.FULL_VERSION_NAME}} - tag: vdev - overwrite: true - file_glob: true - body: | - Different development versions are found in the "Assets" below. - - Select the one with the highest number to get the latest development version. - - Use a file labelled "setup" for a windows installer. No admin right is required for this installation, but the application is not signed. 
You may use a file labelled "portable" for a self contained zip files. + +name: 'Tests' + +on: + push: + + pull_request: + types: [opened, synchronize] #labeled, assigned] + +jobs: + build-and-test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.6, 3.8] # 2.7, + + steps: + # --- Install steps + - name: Checkout + uses: actions/checkout@v2 + with: + submodules: recursive + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install python dependencies + run: | + python -m pip install --upgrade pip + pip install -r _tools/travis_requirements.txt + pip install -r weio/requirements.txt + + - name: System info + run: | + echo "Actor : $GITHUB_ACTOR" + echo "Branch: $GITHUB_REF" + pip list + ls + + # --- Run Tests + - name: Tests + run: | + make test + + # --- Check if Deployment if needed + - name: Check if deployment is needed + id: check_deploy + env: + PY_VERSION: ${{matrix.python-version}} + GH_EVENT : ${{github.event_name}} + run: | + echo "GH_EVENT : $GH_EVENT" + echo "PY_VERSION : $PY_VERSION" + export OK=0 + if [[ $PY_VERSION == "3.6" ]]; then export OK=1 ; fi + if [[ "$OK" == "1" && $GH_EVENT == "push" ]]; then export OK=1 ; fi + echo "DEPLOY : $OK" + echo "::set-output name=GO::$OK" + + # --- Run Deployments + - name: Install system dependencies + if: ${{ steps.check_deploy.outputs.GO == '1'}} + run: sudo apt-get install nsis + + - name: Versioning + if: ${{ steps.check_deploy.outputs.GO == '1'}} + id: versioning + run: | + git fetch --unshallow + export VERSION_NAME=`git describe | sed 's/\(.*\)-.*/\1/'` + export FULL_VERSION_NAME="version $VERSION_NAME" + echo "GITHUB_REF: $GITHUB_REF" + echo "VERSION_NAME $VERSION_NAME" + echo "FULL_VERSION_NAME $FULL_VERSION_NAME" + if [[ $GITHUB_REF == *"tags" ]]; then export VERSION_TAG=${GITHUB_REF/refs\/tags\//} ; fi + if [[ $GITHUB_REF != *"tags" ]]; then export VERSION_TAG="" ; fi + echo "VERSION_TAG $VERSION_TAG" + if [[ "$VERSION_TAG" == "" ]]; then export VERSION_TAG="vdev" ; fi + if [[ "$VERSION_TAG" == "vdev" ]]; then export VERSION_NAME=$VERSION_NAME"-dev" ; fi + if [[ "$VERSION_TAG" == "vdev" ]]; then export FULL_VERSION_NAME="latest dev. version $VERSION_NAME" ; fi + echo "VERSION_NAME: $VERSION_NAME" + echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" + echo "::set-output name=FULL_VERSION_NAME::$FULL_VERSION_NAME" + echo "::set-output name=VERSION_NAME::$VERSION_NAME" + echo "::set-output name=VERSION_TAG::$VERSION_TAG" + + - name: Before deploy + if: ${{ steps.check_deploy.outputs.GO == '1'}} + id: before_deploy + env: + FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} + VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} + VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} + run: | + echo "FULL_VERSION_NAME: $FULL_VERSION_NAME" + echo "VERSION_NAME : $VERSION_NAME" + echo "VERSION_TAG : $VERSION_TAG" + pip install pynsist + pip install distlib + git clone https://github.com/takluyver/pynsist + mv pynsist/nsist nsist + make installer + mv build/nsis/pyDatView_setup.exe "pyDatView_"$VERSION_NAME"_setup.exe" + mv _tools/pyDatView.cmd build/nsis/ + mv _tools/pyDatView_Test.bat build/nsis/ + mv _tools/pyDatView.exe build/nsis/ + mv build/nsis build/pyDatView_$VERSION_NAME + cd build && zip -r "../pyDatView_"$VERSION_NAME"_portable.zip" pyDatView_$VERSION_NAME + cd .. 
+ cp "pyDatView_"$VERSION_NAME"_setup.exe" "pyDatView_LatestVersion_setup.exe" + cp "pyDatView_"$VERSION_NAME"_portable.zip" "pyDatView_LatestVersion_portable.zip" + ls + + - name: Deploy + if: ${{ steps.check_deploy.outputs.GO == '1'}} + env: + FULL_VERSION_NAME: ${{steps.versioning.outputs.FULL_VERSION_NAME}} + VERSION_NAME: ${{steps.versioning.outputs.VERSION_NAME}} + VERSION_TAG: ${{steps.versioning.outputs.VERSION_TAG}} + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: pyDatView_*.* + release_name: ${{steps.versioning.outputs.FULL_VERSION_NAME}} + tag: vdev + overwrite: true + file_glob: true + body: | + Different development versions are found in the "Assets" below. + + Select the one with the highest number to get the latest development version. + + Use a file labelled "setup" for a windows installer. No admin right is required for this installation, but the application is not signed. You may use a file labelled "portable" for a self contained zip files. diff --git a/README.md b/README.md index ee487fa..2f14492 100644 --- a/README.md +++ b/README.md @@ -1,206 +1,248 @@ -[![Build status](https://github.com/ebranlard/pyDatView/workflows/Tests/badge.svg)](https://github.com/ebranlard/pyDatView/actions?query=workflow%3A%22Tests%22) -[![Build Status](https://travis-ci.com/ebranlard/pyDatView.svg?branch=master)](https://travis-ci.com/ebranlard/pyDatView) -Donate just a small amount, buy me a coffee! - - - -# pyDatView - -A crossplatform GUI to display tabulated data from files or python pandas dataframes. It's compatible Windows, Linux and MacOS, python 2 and python 3. Some of its features are: multiples plots, FFT plots, probability plots, export of figures... -The file formats supported, are: CSV files and other formats present in the [weio](http://github.com/ebranlard/weio/) library. -Additional file formats can easily be added. - -![Scatter](/../screenshots/screenshots/PlotScatter.png) - -## QuickStart -For **Windows** users, an installer executable is available [here](https://github.com/ebranlard/pyDatView/releases) (look for the latest pyDatView\*.exe) - -**Linux** and **MacOS** users can use the command lines below. **Linux** users may need to install the package python-wxgtk\* (e.g. `python-gtk3.0`) from their distribution. **MacOS** users can use a `brew`, `anaconda` or `virtualenv` version of python and pip, but the final version of python that calls the script needs to have access to the screen (see [details for MacOS](#macos-installation)). The main commands for **Linux** and **MacOS** users are: -```bash -git clone --recurse-submodules https://github.com/ebranlard/pyDatView -cd pyDatView -python -m pip install --user -r requirements.txt -make # executes: 'python pyDatView.py' (on linux) or './pythonmac pyDatView.py' (on Mac) -``` -More information about the download, requirements and installation is provided [further down this page](#installation) - - -## Usage -### Launching the GUI -Windows users that used a `setup.exe` file should be able to look for `pyDatView` in the Windows menu, then launch it, and pin the program to the taskbar for easier access. - -If you cloned this repository, the main script at the root (`pyDatView.py`) is executable and will open the GUI directly. 
A command line interface is provided, e.g.: -```bash -pyDatView file.csv -``` -The python package can also be used directly from python/jupyter to display a dataframe or show the data in a file -```python -import pydatview -pydatview.show(dataframe=df) -# OR -pydatview.show(filenames=['file.csv']) -# OR -pydatview.show('file.csv') -``` -### Workflow -Documentation is scarce for now, but here are some tips for using the program: - - You can drag and drop files to the GUI directly to open them. Hold the Ctrl key to add. - - You can open several files at once, with same or different filetypes. Upon opening multiple files, a new table appears with the list of open files. - - To add multiple channels or data from multiple files to a plot, use `ctrl + click` or shift-click to make selections. - - Look for the menus indicated by the "sandwich" symbol (3 horizontal bars ੆). These menus are also accessible with right clicks. - - The menus will allow you to edit tables (rename, delete them), add or remove columns (for instance to convert a signal from one unit to another unit), or change the values displayed in the information table at the bottom. - - Few options are also available in the menus `data` and `tools` located at the top of the program. - - The modes and file format drop down menus at the top can usually be kept on `auto`. If a file cannot be read, pay attention to the file extension used, and possibly select a specific file format in the dropdown menu instead of `auto`. - - - -## Features -Main features: -- Plot of tabular data within a file -- Automatic detection of fileformat (based on [weio](http://github.com/ebranlard/weio/) but possibility to add more formats) -- Reload of data (e.g. on file change) -- Display of statistics -- Export figure as pdf, png, eps, svg - -Different kind of plots: -- Scatter plots or line plots -- Multiple plots using sub-figures or a different colors -- Probability density function (PDF) plot -- Fast Fourier Transform (FFT) plot - -Plot options: -- Logarithmic scales on x and y axis -- Scaling of data between 0 and 1 using min and max -- Synchronization of the x-axis of the sub-figures while zooming - -Data manipulation options: - - Remove columns in a table, add columns using a given formula, and export the table to csv - - Mask part of the data (for instance selecting times above a certain value to remove the transient). Apply the mask temporarily, or create a new table from it - - Estimate logarithmic decrement from a signal tthat is decaying - - Extract radial data from OpenFAST input files - - - -## Screenshots - -Scatter plot (by selecting `Scatter`) and several plots on the same figure: - -![Scatter](/../screenshots/screenshots/PlotScatter.png) - - - -Fast Fourier Transform of the signals (by selecting `FFT`) and displaying several plots using subfigures (by selecting `Subplot`). - -![SubPlotFFT](/../screenshots/screenshots/SubPlotFFT.png) - -Probability density function: - -![PlotPDF](/../screenshots/screenshots/PlotPDF.png) - -Scaling all plots between 0 and 1 (by selecting `MinMax`) -![PlotMinMax](/../screenshots/screenshots/PlotMinMax.png) - - - -## Installation - -### Windows installation -For Windows users, installer executables are available [here](https://github.com/ebranlard/pyDatView/releases) (look for the latest pyDatView\*.exe) - -### Linux installation -The script is compatible python 2.7 and python 3 and relies on the following python packages: `numpy` `matplotlib`, `pandas`, `wxpython`. 
-To download the code and install the dependencies (with pip) run the following: -```bash -git clone --recurse-submodules https://github.com/ebranlard/pyDatView -cd pyDatView -python -m pip install --user -r requirements.txt -``` -If the installation of `wxpython` fails, you may need to install the package python-wxgtk\* (e.g. `python-gtk3.0`) from your distribution. For Debian/Ubuntu systems, try: -`sudo apt-get install python-wxgtk3.0`. -For further troubleshooting you can check the [wxPython wiki page](https://wiki.wxpython.org/). - -If the requirements are successfully installed you can run pyDatView by typing: -```bash -python pyDatView.py -``` -To easily access it later, you can add an alias to your `.bashrc` or install the pydatview module: -```bash -echo "alias pydat='python `pwd`/pyDatview.py'" >> ~/.bashrc -# or -python setup.py install -``` - - -## MacOS installation -The installation works with python2 and python3, with `brew` (with or without a `virtualenv`) or `anaconda`. -First, download the source code: -```bash -git clone --recurse-submodules https://github.com/ebranlard/pyDatView -cd pyDatView -``` -Before installing the requirements, you need to be aware of the two following issues with MacOS: -- If you are using the native version of python, there is an incompatibility between the native version of `matplotlib` on MacOS and the version of `wxpython`. The solution is to use `virtualenv`, `brew` or `anaconda`. -- To use a GUI app, you need a python program that has access to the screen. These special python programs are in different locations. For the system-python, it's usually in `/System`, the `brew` versions are usually in `/usr/local/Cellar`, and the `anaconda` versions are usually called `python.app`. -The script `pythonmac` provided in this repository attempt to find the correct python program depending if you are in a virtual environment, in a conda environment, a system-python or a python from brew or conda. - -Different solutions are provided below depending on your preferred way of working. - -### Brew-python version (outside of a virtualenv) -If you have `brew` installed, and you installed python with `brew install python`, then the easiest is to use your `python3` version: -``` -python3 -m pip install --user -r requirements.txt -python3 pyDatView.py -``` - -### Brew-python version (inside a virtualenv) -If you are inside a virtualenv, with python 2 or 3, use: -``` -pip install -r requirements.txt -./pythonmac pyDatView.py -``` -If the `pythonmac` commands fails, contact the developer, and in the meantime try to replace it with something like: -``` -$(brew --prefix)/Cellar/python/XXXXX/Frameworks/python.framework/Versions/XXXX/bin/pythonXXX -``` -where the result from `brew --prefix` is usually `/usr/loca/` and the `XXX` above corresponds to the version of python you are using in your virtual environment. - - -### Anaconda-python version (outside a virtualenv) -The installation of anaconda sometimes replaces the system python with the anaconda version of python. You can see that by typing `which python`. Use the following: -``` -python -m pip install --user -r requirements.txt -./pythonmac pyDatView.py -``` -If the `pythonmac` commands fails, contact the developer, and in the meantime try to replace it with a path similar to -```bash -/anaconda3/bin/python.app -``` -where `/anaconda3/bin/` is the path that would be returned by the command `which conda`. Note the `.app` at the end. 
If you don't have `python.app`, try installing it with `conda install -c anaconda python.app` - - -### Easy access -To easily access the program later, you can add an alias to your `.bashrc` or install the pydatview module: -```bash -echo "alias pydat='python `pwd`/pyDatview.py'" >> ~/.bashrc -# or -python setup.py install -``` - - - - - -## Adding more file formats -File formats can be added by implementing a subclass of `weio/File.py`, for instance `weio/VTKFile.py`. Existing examples are found in the folder `weio`. -Once implemented the fileformat needs to be registered in `weio/__init__.py` by adding an import line at the beginning of this script and adding a line in the function `fileFormats()` of the form `formats.append(FileFormat(VTKFile))` - - - -## Contributing -Any contributions to this project are welcome! If you find this project useful, you can also buy me a coffee (donate a small amount) with the link below: - - -Donate just a small amount, buy me a coffee! - - - +[![Build status](https://github.com/ebranlard/pyDatView/workflows/Tests/badge.svg)](https://github.com/ebranlard/pyDatView/actions?query=workflow%3A%22Tests%22) +[![Build Status](https://travis-ci.com/ebranlard/pyDatView.svg?branch=master)](https://travis-ci.com/ebranlard/pyDatView) +Donate just a small amount, buy me a coffee! + + + +# pyDatView + +A crossplatform GUI to display tabulated data from files or python pandas dataframes. It's compatible Windows, Linux and MacOS, python 2 and python 3. Some of its features are: multiples plots, FFT plots, probability plots, export of figures... +The file formats supported, are: CSV files and other formats present in the [weio](http://github.com/ebranlard/weio/) library. +Additional file formats can easily be added. + +![Scatter](/../screenshots/screenshots/PlotScatter.png) + +## QuickStart +For **Windows** users, an installer executable is available [here](https://github.com/ebranlard/pyDatView/releases) (look for the latest pyDatView\*.exe) + +**Linux** and **MacOS** users can use the command lines below. **Linux** users may need to install the package python-wxgtk\* (e.g. `python-gtk3.0`) from their distribution. **MacOS** users can use a `brew`, `anaconda` or `virtualenv` version of python and pip, but the final version of python that calls the script needs to have access to the screen (see [details for MacOS](#macos-installation)). The main commands for **Linux** and **MacOS** users are: +```bash +git clone --recurse-submodules https://github.com/ebranlard/pyDatView +cd pyDatView +python -m pip install --user -r requirements.txt +make # executes: 'python pyDatView.py' (on linux) or './pythonmac pyDatView.py' (on Mac) +echo "alias pydat='make -C `pwd`'" >> ~/.bashrc +``` +More information about the download, requirements and installation is provided [further down this page](#installation) + + +## Usage +### Launching the GUI +Windows users that used a `setup.exe` file should be able to look for `pyDatView` in the Windows menu, then launch it, and pin the program to the taskbar for easier access. + +If you cloned this repository, the main script at the root (`pyDatView.py`) is executable and will open the GUI directly. 
A command line interface is provided, e.g.:
+```bash
+pyDatView file.csv
+```
+The python package can also be used directly from python/jupyter to display a dataframe or show the data in a file:
+```python
+import pydatview
+pydatview.show(dataframe=df)
+# OR
+pydatview.show(filenames=['file.csv'])
+# OR
+pydatview.show('file.csv')
+```
+
+### Quicklaunch/Shortcut
+**Windows**
+ - If you used the `setup.exe`, you will find the `pyDatView` App in the Windows menu: you can launch it from there, pin it to Start, pin it to the taskbar, or open the file location to create a shortcut.
+ - If you used the portable version, you'll find `pyDatView.exe` at the root of the directory. You can launch it and pin it to your taskbar. You can also right-click and create a shortcut to add to your desktop or start menu.
+ - If you cloned the repository, you can create a shortcut at the root of the repository. In Explorer, right-click on an empty space, select New > Shortcut. Set the shortcut as follows:
+```
+   "C:\PYTHON_DIR\PythonXX\pythonw.exe" "C:\INSTALL_DIR\pyDatView\pyDatView.launch.pyw"
+```
+
+**Linux**
+
+You can add an alias to your bashrc as follows. Navigate to the root of the pyDatView repository, and type:
+```
+   echo "alias pydat='python `pwd`/pyDatView.py'" >> ~/.bashrc
+```
+Next time you open a terminal, you can type `pydat` to launch pyDatView.
+Adapt this to your shell configuration file if you use another shell (e.g. `.shrc`).
+
+**MacOS**
+
+The procedure is the same as for Linux; the small difference is that you need to find the "proper" python to call. When you run `./pythonmac` from the root of the directory, the script tries to find the right version for you and finishes by showing a line of the form: `[INFO] Using: /PATH/TO/PYTHON `. This line gives you the path to python. Add `pydat` as an alias by running the line below (after adapting the `PATH/TO/PYTHON`):
+```
+   echo "alias pydat='PATH/TO/PYTHON `pwd`/pyDatView.py'" >> ~/.zshrc
+```
+Next time you open a terminal, you can type `pydat` to launch pyDatView.
+
+
+
+### File association
+**Windows**
+
+To associate a given file type with pyDatView, follow these steps:
+
+1. Locate `pyDatView.exe`. If you installed using `setup.exe` or the portable `zip`, you'll find `pyDatView.exe` at the root of the installation folder (default is `C:\Users\%USERNAME%\AppData\Local\pyDatView\`). If you cannot find the exe, download it from [the repository](/_tools/pyDatView.exe). If you cloned the repository, you'll find the executable in the subfolder `_tools\` of the repository.
+
+2. Verify that the exe works. Double-click on the executable to verify that it launches pyDatView. If it doesn't, run it from a terminal and look at the outputs.
+
+3. Add the file association. Right-click on a file you want to associate pyDatView with. Select "Open With" > "More Apps" > scroll to "Look for another App on my PC" > Navigate to the location of `pyDatView.exe` mentioned above. If this works, repeat the operation and check the box "Always use this App for this filetype".
+
+
+### Workflow
+Documentation is scarce for now, but here are some tips for using the program:
+ - You can drag and drop files to the GUI directly to open them. Hold the Ctrl key to add them to the files that are already open.
+ - You can open several files at once, with the same or different filetypes. Upon opening multiple files, a new table appears with the list of open files.
+ - To add multiple channels or data from multiple files to a plot, use `Ctrl+click` or `Shift+click` to make selections.
+ - Look for the menus indicated by the "sandwich" symbol (3 horizontal bars ☰). 
These menus are also accessible with a right click.
+ - The menus will allow you to edit tables (rename or delete them), add or remove columns (for instance to convert a signal from one unit to another), or change the values displayed in the information table at the bottom.
+ - A few options are also available in the menus `data` and `tools` located at the top of the program.
+ - The modes and file format drop-down menus at the top can usually be kept on `auto`. If a file cannot be read, pay attention to the file extension used, and possibly select a specific file format in the drop-down menu instead of `auto`.
+
+
+
+## Features
+Main features:
+- Plot of tabular data within a file
+- Automatic detection of the file format (based on [weio](http://github.com/ebranlard/weio/), with the possibility to add more formats)
+- Reload of data (e.g. on file change)
+- Display of statistics
+- Export figure as pdf, png, eps, svg
+
+Different kinds of plots:
+- Scatter plots or line plots
+- Multiple plots using sub-figures or different colors
+- Probability density function (PDF) plot
+- Fast Fourier Transform (FFT) plot
+
+Plot options:
+- Logarithmic scales on the x and y axes
+- Scaling of data between 0 and 1 using min and max
+- Synchronization of the x-axis of the sub-figures while zooming
+
+Data manipulation options:
+ - Remove columns in a table, add columns using a given formula, and export the table to csv
+ - Mask part of the data (for instance selecting times above a certain value to remove the transient). Apply the mask temporarily, or create a new table from it
+ - Estimate the logarithmic decrement of a decaying signal
+ - Extract radial data from OpenFAST input files
+
+
+
+## Screenshots
+
+Scatter plot (by selecting `Scatter`) and several plots on the same figure:
+
+![Scatter](/../screenshots/screenshots/PlotScatter.png)
+
+
+
+Fast Fourier Transform of the signals (by selecting `FFT`) and displaying several plots using sub-figures (by selecting `Subplot`):
+
+![SubPlotFFT](/../screenshots/screenshots/SubPlotFFT.png)
+
+Probability density function:
+
+![PlotPDF](/../screenshots/screenshots/PlotPDF.png)
+
+Scaling all plots between 0 and 1 (by selecting `MinMax`):
+![PlotMinMax](/../screenshots/screenshots/PlotMinMax.png)
+
+
+
+## Installation
+
+### Windows installation
+For Windows users, installer executables are available [here](https://github.com/ebranlard/pyDatView/releases) (look for the latest pyDatView\*.exe).
+
+### Linux installation
+The script is compatible with python 2.7 and python 3 and relies on the following python packages: `numpy`, `matplotlib`, `pandas`, `wxpython`.
+To download the code and install the dependencies (with pip) run the following:
+```bash
+git clone --recurse-submodules https://github.com/ebranlard/pyDatView
+cd pyDatView
+python -m pip install --user -r requirements.txt
+```
+If the installation of `wxpython` fails, you may need to install the package python-wxgtk\* (e.g. `python-wxgtk3.0`) from your distribution. For Debian/Ubuntu systems, try:
+`sudo apt-get install python-wxgtk3.0`.
+For further troubleshooting you can check the [wxPython wiki page](https://wiki.wxpython.org/). 
+
+If the requirements are successfully installed, you can run pyDatView by typing:
+```bash
+python pyDatView.py
+```
+To easily access it later, you can add an alias to your `.bashrc` or install the pydatview module:
+```bash
+echo "alias pydat='python `pwd`/pyDatView.py'" >> ~/.bashrc
+# or
+python setup.py install
+```
+
+
+## MacOS installation
+The installation works with python 2 and python 3, with `brew` (with or without a `virtualenv`) or `anaconda`.
+First, download the source code:
+```bash
+git clone --recurse-submodules https://github.com/ebranlard/pyDatView
+cd pyDatView
+```
+Before installing the requirements, you need to be aware of the following two issues with MacOS:
+- If you are using the native version of python, there is an incompatibility between the native version of `matplotlib` on MacOS and the version of `wxpython`. The solution is to use `virtualenv`, `brew` or `anaconda`.
+- To use a GUI app, you need a python program that has access to the screen. These special python programs are in different locations. For the system-python, it's usually in `/System`, the `brew` versions are usually in `/usr/local/Cellar`, and the `anaconda` versions are usually called `python.app`.
+The script `pythonmac` provided in this repository attempts to find the correct python program depending on whether you are in a virtual environment, a conda environment, a system-python, or a python from brew or conda.
+
+Different solutions are provided below depending on your preferred way of working.
+
+### Brew-python version (outside of a virtualenv)
+If you have `brew` installed, and you installed python with `brew install python`, then the easiest is to use your `python3` version:
+```
+python3 -m pip install --user -r requirements.txt
+python3 pyDatView.py
+```
+
+### Brew-python version (inside a virtualenv)
+If you are inside a virtualenv, with python 2 or 3, use:
+```
+pip install -r requirements.txt
+./pythonmac pyDatView.py
+```
+If the `pythonmac` command fails, contact the developer, and in the meantime try to replace it with something like:
+```
+$(brew --prefix)/Cellar/python/XXXXX/Frameworks/python.framework/Versions/XXXX/bin/pythonXXX
+```
+where the result from `brew --prefix` is usually `/usr/local/` and the `XXX` above corresponds to the version of python you are using in your virtual environment.
+
+
+### Anaconda-python version (outside a virtualenv)
+The installation of anaconda sometimes replaces the system python with the anaconda version of python. You can see that by typing `which python`. Use the following:
+```
+python -m pip install --user -r requirements.txt
+./pythonmac pyDatView.py
+```
+If the `pythonmac` command fails, contact the developer, and in the meantime try to replace it with a path similar to:
+```bash
+/anaconda3/bin/python.app
+```
+where `/anaconda3/bin/` is the path that would be returned by the command `which conda`. Note the `.app` at the end. If you don't have `python.app`, try installing it with `conda install -c anaconda python.app`.
+
+
+### Easy access
+To easily access the program later, you can add an alias to your `.bashrc` or install the pydatview module:
+```bash
+echo "alias pydat='python `pwd`/pyDatView.py'" >> ~/.bashrc
+# or
+python setup.py install
+```
+
+
+
+
+
+## Adding more file formats
+File formats can be added by implementing a subclass of `weio/File.py`, for instance `weio/VTKFile.py`. Existing examples are found in the folder `weio`. 
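As an illustration only (not part of the patch), a new reader could look roughly like the sketch below; the exact base-class interface should be checked against the existing readers in the `weio` folder, and the hook names used here (`defaultExtensions`, `formatName`, `_read`, `_toDataFrame`) are assumptions based on those readers.

```python
# Hypothetical weio/VTKFile.py -- a minimal sketch; the hook names are assumptions
import pandas as pd
from .File import File

class VTKFile(File):
    @staticmethod
    def defaultExtensions():
        # Extensions that trigger this reader (assumed hook)
        return ['.vtk']

    @staticmethod
    def formatName():
        # Label shown in pyDatView's file-format dropdown (assumed hook)
        return 'VTK file'

    def _read(self):
        # Parse self.filename and store the raw data on the object
        with open(self.filename, 'r') as f:
            self.lines = f.readlines()

    def _toDataFrame(self):
        # Return a pandas DataFrame (one column per channel) used for plotting
        return pd.DataFrame({'LineNumber_[-]': range(len(self.lines))})
```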
+Once implemented the fileformat needs to be registered in `weio/__init__.py` by adding an import line at the beginning of this script and adding a line in the function `fileFormats()` of the form `formats.append(FileFormat(VTKFile))` + + + +## Contributing +Any contributions to this project are welcome! If you find this project useful, you can also buy me a coffee (donate a small amount) with the link below: + + +Donate just a small amount, buy me a coffee! + + + diff --git a/_tools/Makefile b/_tools/Makefile index 601adc2..b813bfe 100644 --- a/_tools/Makefile +++ b/_tools/Makefile @@ -1,8 +1,17 @@ -all: compile test - -compile: - cl pyDatView.c - -test: - @echo -------------------------------------------------- - ./pyDatView.exe AA BB CC +all: compile test + +compile: + @echo -------------------------------------------------------------------------- + @cl /nologo -c pyDatView.c + @echo -------------------------------------------------------------------------- + @rc /nologo pyDatView.rc + @echo -------------------------------------------------------------------------- + @link /nologo pyDatView.obj pyDatView.res /out:pyDatView.exe + @rm *.obj *.res + +test: + @echo -------------------------------------------------------------------------- + ./pyDatView.exe C:\Bin\test.outb BB CC + +clean: + rm *.obj *.res pyDatView.exe diff --git a/_tools/pyDatView.c b/_tools/pyDatView.c index 15f9cf9..c66b963 100644 --- a/_tools/pyDatView.c +++ b/_tools/pyDatView.c @@ -1,85 +1,244 @@ -#define _WIN32_WINNT 0x0500 -#define MAX 1024 - -#include -#include -#include - -#include -#pragma comment(lib,"User32.lib") // for static linking and using ShowWindow - -char* concat(const char *s1, const char *s2) -{ - char *result = malloc(strlen(s1) + strlen(s2) + 1); // +1 for the null-terminator - strcpy(result, s1); - strcat(result, s2); - return result; -} - -void concatenate(char p[], char q[]) { - int c, d; - c = 0; - while (p[c] != '\0') { - c++; - } - d = 0; - while (q[d] != '\0') { - p[c] = q[d]; - d++; - c++; - } - p[c] = '\0'; -} - - -int main (int argc, char** argv) { - char mainCommand[] = "\\Python\\pythonw.exe -c \"import pydatview; pydatview.show();\""; - char wd[MAX]; - char args[MAX]=""; - char fullCommand[MAX]=""; - char path [MAX_PATH]; - char* pfullCommand ; - - // --- Hidding window - HWND hWnd = GetConsoleWindow(); - ShowWindow( hWnd, SW_MINIMIZE ); //won't hide the window without SW_MINIMIZE - //ShowWindow( hWnd, SW_HIDE ); - - // --- List of argumenst to python list of string - int i; - concatenate(args, "["); - for(i = 1; i <= argc-1; i++) { - concatenate(args, "'"); - concatenate(args, argv[i]); - concatenate(args, "'"); - if (i +#include +#include +#include // stat +#include // bool type + + +#include +#pragma comment(lib,"User32.lib") // for static linking and using ShowWindow + + +/* True if file exists */ +bool file_exists (char *filename) { + struct stat buffer; + return (stat (filename, &buffer) == 0); +} + +/* Returns path of current executable */ +char* getpath() +{ + TCHAR path[MAX]; + DWORD length; + length = GetModuleFileName(NULL, path, 1000); + char *result = malloc(length+ 1); // +1 for the null-terminator + int c =0 ; + while (path[c] != '\0') { + result[c] = path[c] ; + c++; + } + result[c] = '\0'; + //char *result = path; + return result; +} + + +/* Concatenate two strings */ +char* concat(const char *s1, const char *s2) +{ + char *result = malloc(strlen(s1) + strlen(s2) + 1); // +1 for the null-terminator + strcpy(result, s1); + strcat(result, s2); + return result; +} + +/* 
Concatenate string2 to string1 */ +void concatenate(char p[], char q[]) { + int c, d; + c = 0; + while (p[c] != '\0') { + c++; + } + d = 0; + while (q[d] != '\0') { + p[c] = q[d]; + d++; + c++; + } + p[c] = '\0'; +} + + +/* Launch pydatview using local pythonw and command line arguments*/ +int main (int argc, char** argv) { + char wd[MAX]; + char args[MAX]=""; + char fullCommand[MAX]=""; + char path [MAX_PATH]=""; + char pydatpy[MAX_PATH]=""; + char pythonwpath0[MAX_PATH]=""; + char pythonwpath1[MAX_PATH]=""; + char pythonwpath2[MAX_PATH]=""; + char pythonwpath3[MAX_PATH]=""; + char pythonwpath4[MAX_PATH]=""; + char* pfullCommand ; + bool useImport = true; + int index=0; + + // --- Hidding window + HWND hWnd = GetConsoleWindow(); + //ShowWindow( hWnd, SW_MINIMIZE ); //won't hide the window without SW_MINIMIZE + ShowWindow( hWnd, SW_HIDE ); + + // --- Get user name (for AppData path) + char* user = getenv("USERNAME"); + printf("Username : %s\n", user); + + // --- Executable path + char * exename = getpath(); + printf("Exe name : %s\n", exename); + char *exedir = exename; + exedir[strlen(exedir) - 13] = '\0'; // remove pyDatView.exe from path + char parentdir[7]; + strncpy(parentdir, &exedir[strlen(exedir)-7],6); + printf("Exe dir : %s\n", exedir); + printf("Exe dir-7 : %s\n", parentdir); + + // --- Current directory + wd[MAX-1] = '\0'; + if(getcwd(wd, MAX-1) == NULL) { + printf ("[WARN] Can not get current working directory\n"); + wd[0] = '.'; + } + printf("Current Dir: %s\n", wd); + + // --- Get PYDATPATH if defined as env variable + char* pydatpath = getenv("PYDATPATH"); + if (pydatpath) { + printf("PYDATPATH : %s\n", pydatpath); + }else{ + printf("PYDATPATH : (environmental variable not set)\n"); + } + + // --- Pythonw path (assuming it's in system path) + concatenate(pythonwpath1, "pythonw "); + printf("Pythonw1 : %s\n", pythonwpath1); + + + // --- Pythonw path (assuming close to current executable) + concatenate(pythonwpath2, exedir); + concatenate(pythonwpath2,"Python\\pythonw.exe"); + printf("Pythonw2 : %s\n", pythonwpath2); + + // --- Pythonw path (assuming user installed in AppData) + concatenate(pythonwpath3,"C:\\Users\\"); + concatenate(pythonwpath3,user); + concatenate(pythonwpath3,"\\AppData\\Local\\pyDatView\\Python\\pythonw.exe"); + printf("Pythonw3 : %s\n", pythonwpath3); + + // --- Pythonw path (using PYDATPATH env variable) + if (pydatpath) { + concatenate(pythonwpath4, pydatpath); + concatenate(pythonwpath4,"\\Python\\pythonw.exe"); + printf("Pythonw4 : %s\n", pythonwpath4); + } + + + + // --- Selecting pythonw path that exist + if (strcmp(parentdir,"_tools")==0) { + exedir[strlen(exedir) - 7] = '\0'; // remove pyDatView.exe from path + printf("Repo dir : %s\n", exedir); + concatenate(pythonwpath0, pythonwpath1); + useImport =false; + printf(">>> Using Pythonw1\n"); + + } else if (file_exists(pythonwpath2)) { + concatenate(pythonwpath0, pythonwpath2); + printf(">>> Using Pythonw2\n"); + + } else if (file_exists(pythonwpath3)) { + concatenate(pythonwpath0, pythonwpath3); + printf(">>> Using Pythonw3\n"); + + } else if (file_exists(pythonwpath4)) { + concatenate(pythonwpath0, pythonwpath4); + printf(">>> Using Pythonw4\n"); + + } else { + ShowWindow( hWnd, SW_RESTORE); + printf("\n"); + printf("[ERROR] Cannot find pythonw.exe. Try the following options: \n"); + printf(" - place the program at the root of the installation directory\n"); + printf(" - rename the program 'pyDatView.exe' \n"); + printf(" - define the environmental variable PYDATPATH to the root install dir. 
\n");
+        printf("   If none of these options work, contact the developer with the above outputs.\n");
+        printf("\n");
+        printf("Press any key to close this window\n");
+        getchar();
+        return -1;
+    }
+
+    // --- Convert the list of arguments to a python list of strings or a space-separated string
+    int i;
+    if (useImport) {
+        concatenate(args, "[");
+        for(i = 1; i <= argc-1; i++) {
+            concatenate(args, "'");
+            // replace backslashes by forward slashes
+            index=0;
+            while(argv[i][index])
+            {
+                if(argv[i][index] == '\\')
+                    argv[i][index] = '/';
+                else
+                    index++;
+            }
+            concatenate(args, argv[i]);
+            concatenate(args, "'");
+            if (i
[Remainder of the `_tools/pyDatView.c` hunk and the subsequent GIT binary patch data are garbled and omitted.]
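For context, once a `pythonw.exe` has been located, the argument-conversion loop above turns a call like `pyDatView.exe file1.csv file2.csv` into a python command roughly equivalent to the sketch below. This is an illustration only, not part of the patch: the exact command string is assembled by the C code, and the keyword form follows the `pydatview.show` examples given earlier in this README.

```python
# Roughly what the Windows launcher ends up triggering (illustrative sketch)
import pydatview
pydatview.show(filenames=['C:/data/file1.csv', 'C:/data/file2.csv'])
```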
zR!pNAlKjmP{2u=C4Je1vTwqG%eEcQGI2R)uH|T8fO+`70uM$!EPSB`PhAaICunec# z??Q%0^1FZ;g5~f@Y8dcK<}$v3nT(gd>;W@!cN0=T>7I>{7lY;kQyJRvT_Z}EQg{>R zlPJ}N9ds$^9QdmQy;Y10H0>6Y!%AQW%&BH&_-+v@g(KskuO!7I^A2NCj(~re9{Re<1|icW$1PH>D#MZKq%kawl2m$WbfZk9JDL80=60l#etAsk zJs1BbqD+_I@9LoZkO}=8uUUl~a%c6MkW1;>sX4W2hUc2^TPu;p7K2qPX-ur0dV~=n6(Gx#@@~Nkv+4z&6{>RUr zeeU^9KmX7F+Wd=OzOdz2zy8f{fA{+rxBlUee|qW9FK=twzT=fwUweILbIY#Q-Fx=# zYir+s;NTmF4tI1OdGoEe-+8y|z4wp)<*y$c>pp(seB}>ih0mc=tVv3clU`{s-1P_|X6D{QuvM|G!=S_L;LBS+j3+X5VykPVOzY=G}Jt zoVhqJF@M3Gvi!f@{{Iu@uZ3_w;&O`rrCh-7n_Te!00%U!_5U+{&!6I8L?1Q<{)L&o z-#_I)I6vPnzrjA}-Esl`Kg>kor~7TT1VH*)noGtACH-G9H_23*Byfp__4 zGP$vetbQhb+37%!ZDCCEbNV$gR_lPxj7e@mzc$7sJEva< z<46g$F2*Fgre8N>k~P!MV60@#y^KjlO}{?ILnPF+V=_NVkI}-IWGD2CV@$Gb`Xw?R zCZT3!d=cXm#_^2P87n=v48|m5r=N{6tufHg$#{f>S{`GihnUY;>G>5fCfPpyN*QDQ zO8hDrU&^?KvC{LbWo%{pdd8PA-oSV)V@HQ9uO!AB**=-^CdT6!Z()2n@Y+)S6xQVfuaU0`s#%dlD z!MKa<2QfAn4`$rQ*uvOyT-IMCV6hWdLaWm6{(`{k&+rs#E#!ZaxVBE%dKI1ONcQQ5@ z7c=f-yo|Bs?=t_(87DGc%Q%Ixi3cDVjKdi#xyxY2PPUI^oXW;~v8FXK$cmJenAuV*Jn7@K%NmB%=o zaRKAOj4K&OGOlGD#drhb7{;3zk7T@+aVFz7#>*IYF~&ez{0zq7jQbc5W^DOL<~NdY zBI78=DU4$nXD}Yg*vUAPaXw=c5AaGEhcm8WJeYAk<4DFE8AmbR!Z?O;6XTJL+Zbmu z?q+P_0b?)YNHu^oWPYO<$1#p!Y-K!>aXRBn#!kj29#G~pj$~ZQIErx%PgnMgZ3;W2eV)Qji3=3Yk+@RfJc(-=hd&_k2F4>9Z(=N088k@U zX%#`!NOy&PN5FFwJ7m`YM zv;srFGUSAQwCX^=YJYxYJj=x@3h8Z0T*B#G$@wegwT42J82ySlJu5gJg(v~~En|PA z)k42g_D^~l^jpU6mvMO2&=#Seo5OW+{FNNeQjUkTE9gfnEc9C@R*^`*OqGYUU%}=} zIe%2n%ix~$O2{SYfXI@fP)N^$bXVpGJvYMCKGL+AnwtMmJ4w^BxOK?(k*3Yk)O>{6 zNg7t4)cl9q3*jsZ3_q!*aif2|gW5|PR;AQ@irNj~+of~Se$rse%^lbcdnZoJCicgg<>5NhRsrad#ry+G}zDez!uqr=l_Y|MXAGQB9xc1Ly(T*s8 zyo?Li_X&I_{^kI=xcyQ82yQpjPtxE^<%`^5Nux^5+3mjsuxvG zMIxU!qD9Fc@sZ$ql>8(RqU0;o;uZ)GlD|-hvW5Ks`Js}T$u(0h`fs3KSBQGL#b3^n zA7yK|ar}~>%<}gaxlS8MAAN@?wtRLk`N>Rwz0h}u_?7B!l0O9O%SHZ!`2)Gn3u-4+ zesf?luzkyZ9pGcZ{a}{hweou@+gYIh%XT)?U*5s(CfJ`Uw=ATcezIJ${OwPEx1Ij} zB+JR}Pq!>5yPt2#atiQqSx&S3{jXZI6PeH8bjb7v@~P65$0aP&mE-R>GF@~0>6Gcp z2}%dGw?MoyT{rsEtGF=P& z_R{|>KOa%$tiG${pK4;Df5k;;+(5Q7{q?HKPw`tB-%b8}O80a9?MS+}`|DM@&k0Ho zg@31C??Sp)y-oGEn}h7heV(6>sQjq!ybRwSlz%FndH#MO?PrGg&kM1?&EGy`_=;=F z@iw(K_1%{C&Y*Tc@!b;i-6x#q&wpT?FTeEEE`onbZmjselGBf8N4%lKD}H!$ALcoXB@ zjJGoW3*%kwHf3o{F#y?`*#aQuWgYlo(zK`*57^`&-HD0uQCCk^#_G+DSFylnFSL-xt z-O|GLDQy2=j58R&!Pv?8w~X@{w=*tf`~>3~#yc3-Gv32^BjcACZ(;ln<0i%(jN2Hi z@p2dAX0|sNzsXpw!$vahWBX3Vmak=b{grVd<98XSFy7BNgYlmkI~lhy&S(4<<5I?( z8P_oW9pie&#~5#9{A0#j7>M8BCmrEJe)IKQ5P&h|BI{}khT#}2d=T*K*`!8o7of5^C$ z?K2f-`#Q#PY@g1!hV7qaT+dia@1Tdn*v|kf0}U{_48dMU&?u-oe<$_6dv=*?vCb6vi7FXE1)0v6J!h zjPn_9QT81EaK@!_3BXAKTY5&S(3nj4ffZJl8U=;q;DToXGa~Fiv4y&bXG{k7k^~_KOte z_@*#+vi$>$mEM(=aX#BGW?awVU&^?Y?F$&EbNG`P*RcI_j5jd8pK%+9cLn1uY+uH> ziE)K;&)Ceki}4A@2ICEk`xyV6vBfOQ_m7Md8ULDb3gg*~GZy>W;$q@~Vo$%Xsxf;pLF_2Zz5LlDJ?@GLK*!IFUl8 z_Xi>NBrOegPtwj{tfbv4{?$SGC8?*B3Kj*)1Eq=lyBzi^1X`C$L+Vxdavesk*3r5Q z$scIHwERiA6rHg_`=0~h$n_eM4#*#^=h3-Gs$c#u_TB?5ilpoNt|n)O97Gur1r-nx z6;ZlLVpc@NtYkq6BMb7gy582q?^Jblg;S@hd%C)3XkDCPWu={j^_xiC?G`rSei_ZLVYvQ1Az?rL+i*X9 z3;xHj@;s=#9~`e(brRNtM%Uea5boQA;V!YT3E}X%`U+v?YNhhNP()q*h5hyI5yG)` z^%KHlaJQ*p{s}+jFRAlOA-!@HPFWs(&#B?@^?oj-pHSzA!hIR)P0IAt{`76@hT#bG z*^l9$_y2f2eXG1-df{C!8Y7ff-`_%bbY1@m;g~wV5yJ6!_q${qTnD)W~# zuqSPg@UB{Y`I0)n6^^HO#~bFK^3(p*2KmwZ?NJTFbp3?(fhD*b^a*qkH-iSHeo%azJ9`bP5ro} z>)&*oD*IPhhibT==1<41unG6Km8;rxy_mLNxq42+bi9Vu`LD1Zs9en!*4JryN_z?G z9rfc?2rF0Lh4pu5+__fH+rm29=sJ4|>t18)#)q)JR9}DP`j0Y(hMh5cDD5Y#lZ~yL z$A$Gp<*GVeU#IJj%GGwdeofcu>c^w7PN-Z}r~B#pld?1#rt={ED{R90R>S?mc&pz} z!|`?gC#(a~zrrT04=bNO5S~Y9=s&`Gk@6`8x}TQcFg?w`em)S^^XltEpJ$-sMd=U1 zdUJT4y@asxX$OMyov^xqX+&jy%68<{3)#KDNCpArT(g{Ee+Fk 
zU;6ES%KFec&=kU%TUf8Hx3}>9A`QcI{a^W%3Voh~w!Yqfgy#Y1Utts0kCjiq(C0*G zf6%vig-v)~KvH-97Q9FJ+t5A@;~TbDSl>@YErd;2hp(@Xux>!-6Jds_4+|qm8CD)I zgk$RJBZTQUb_kmgPOh6Dgs`;k{3)#a*O#w23}3*m8?3?yL=ny-&yH0Wc6h)0C=DoD z<+N6hhQrDaY(KVf_{rjy5yEwTZh*Hj9ye_);Z2!I7W0YJ1WP?H~bFM)oz7F z3x|I6Gpo1CV$I{TLhNmR|9X0K{JG?Z@jKrqG4z1rS0WwOM_jPn___Hn*DgywKN)Mw zzFR#?mCx)&LhZ@RWAqM3yV-VfQB~4zcNG@6Y|--Dy=|g()A`<4 zW|!(-IW^5e_x;3^In8(WFs6lCKe`{h_R_JiArH52AAVNPU%h;^`Qd9pdHbtd&RYmN+| z8nt7<L zqstA*annFJfiw>pbd{HTsHkwOD$n#UqJlJY_;NG(*Nrq9q zdpV9bmsP}{Sm@XP>iNYE1GhiB?eJ>Ls?C3RQA2w7Ec#F;&uTX1biB^sM73jnCr3C} zcGoicWHTi9tE0T^GPlXaDtFxN!!!M3Cou_CqSYbR=_wl*5b|{Qfi5XoQ-3q(E|2xx zJL~bxHCh2(`^4yNe(-JKZ}v}2KE${>`0ri*$tUyAlNVG57x$03x4wDmqWiwM!-}pOS?Uw7aMqTSXt8p;* z&AF?u2Nr&Py4#=moIhD?pWHm@T4q(ji(Tqwn%?1^C-fV!@k%?NXZ_;i4V#v<3war0 za=iJ{JMSH*ydRj#j{wdIRcWb0XD)cxho`4K}z zuP-ir6MLrxoAhGujBVM&>^ikum7+PJ*TT2moX1xsta$CMYksP7k)B9Cs>CSnWOLs+ zRp(bP)J@Kt_+5RvvtNE}N^57QX@fS4KHC-%?+IsLh8MgUlXX}0a@pl`rw7gw4g7OR z(aodPgWFCmP_^pXTeZ{qK2|%(Y1?Ooz0=oaobPiufkx7NAkiGJ>Fkh+IrMt&cm=` z&qA?j$Fyrte6JmUY{!}ndXhJL)XVbmb1uBzFf6Ezs#m|)zB>Azq{LVcg=i+o%uU<@!cOT|8`)JRy4)=ok zym>J0bW@uu?aZz{$K2g^XJYTCTB|m#Od(laUI_Q}ycgXR6{{UhinP=_-eG3-_B7L$ zdHriTEq&kI%(ZoIT?5s1ox9XJ9<0e4KiF?g!NwO}$tOwzCN2w?E--9r^z@YYdNvc?*_u4v|KL62tE!eWB`ZB22E7T|{jzVhg=U-0eU3i8vt6g($MCto zd^m8Wsdw~E@~hdpS1Hdgm9%U#EtA^jg~j;D>hc@oyi#-DwlSR)-Mg%vOL0Nze0@Wc z*qq+UTMS!-dYdp^bh5@>YU;eHgDvo~?rdA-?wt3zt1T3^!Gf8qTf-5&PRUVTC3;pzGPU$xB$ zIXQpJ;5P2QP1NR?zC2cIQuAHiFg;@Qw+^?WQv;tK2-kq!!tzI3>oC^~pQ;yiPBwnP zxvbsrZ1wK2XAdJ<9ad|Rv+|ZCY)SLyCk~|Onl^Q6Y#Tbpq+osL>$8$46^A~#bKy+u zC$SnGXBOCh)c7s>eL@eb#0T?!OPMMi*9+f`JDm1)JU2gmm;GY{+DVZ<$5&+C zTJGd`&-!)7dKa&Xv0mvtruuDto!;}+H@g**+p^DBFR0(yIPXXtiG052Vc$&&T3gnw z>AFCAE_i;;pfl1-H%}J$CPdsDbZOYW_HS!V;==l$vVL%I@xIl4i`NCdu6j6bN`z+X zwQU}+wxi_{(X1mQyM%Nc|J-B4MX!Equ4g)}`La=Zw(8{4*EVB3UX0qYYEG%<*AH2@ zb{rqdUb&PvUBcO}z7g;IM$dARMPg3u$fr*8BfEE6x7bK#=sviM#onBX?hZ!=wV3E~ ze@;qnc1CBDKOZ<}cfT4xS?z`W@<6^!k9-;Th-{8z8!p+Ov84U6EH6$5Jmf<{Fu1+qj=6^3R zkGapP4foTe-VM@2{ki?Jy5q^u4d4$9t>4dw>(dY9w_88$FRBZ#SWdU|bzvd>>AEnV zUFCv8p7`?(*VljD@_$TEKK&Ff{wa*h8pWpgaj+@M@T*{2gMD@BZ=N}b`pv2fukBx~ z*{nx+U0D8o#5wnjE_Gp=56x4VA2k=9dX;YAhbPzQGkpW$<@U-%0<+f~8AkJw+{_zG z8M^p@lrlFp=NF!R+%qWywN_4}EF4!mlQJWF0;iZWNjV-02g>Dz1hFmo`MQv7u0I?6(of$MpW zZn}Xo+vp?D45nZs4a+>HZlV+&=(U;B`&ZN7DGQf9q@kQ(cp5n~+JYC0k%SY_xng7UOKMfPpSe~KGNuI8oZ3_5!)l{CYmREUtzj8Q0i*O)8P<}FM2VLr|Ya2JiXs}9;WdUt%W>AFJAG?oz&$B zjnBBXkSF=@if8We&PQo{en>XY-0d%T78ZIOXr(&NPQeFYZ9^315n<{8Qq@RZ3P^K?C}eu{6$WG9|Q zHUorkZn{A2T%Kg`8J@0lKk)S4Y|PJ3uA4o02F@MHGkfqfp5D6KcuF>26~gxAJd0*q zpP}Ui-sN}}=|%C(aGWi$bskTd*-c^p>q?%Ib&b#R_VD%P>3u1hXRg;Ao~}#tc#7`b z;2Ee|$y01@bB-RDZRE|fusnjN?8Ho-Bz-$i_#e;Q+!CG=V{?8!&X4cJGh_E~p5*Bi zo`F^Cc}l8J^UQz!hNt|9?gd(2#$*?sp)LFK%sraSQ^GFcDc@4SQ(SwWrz`tSV65#$ zdc5}(AD)to2%hq|={)oOe;2}<=XnHWKer)%*-A?&b+ zXGV{kJVk@cd6EIG`5Vy>2_#9xoU{>t~Y<|*ss$Wzp+1J56>&rolETOi(TcUWh) z*G+wwcCm(kcn!Kf`HlYKD_*hj;hp_UZ+H!2J8XJ4w9HHQ*wZdCey6;QHk}E*JgdrU zt&6QVG5nj?nSx{2-tZT756p=8#ELa+X32jNQ+ zMXW)>L zJ1y9)qt>48lV!wCEz$@*Wn{rtYPId+qTPn|eA@c?X;TY!MZe^4zaMJB{yI&zZ$a5N zuf@83&aQ0Jj5X0q@BSva$}3^;8Lh*Yny|}y235T^Yswb1Z9BxLBeMCNtBu%< zk8>Jt8D-19xYx3(PfvR`N6+rAc$qP){ao$0?z*km+NjOjL;BdV5&b(@W$QI%J2(uf zO*5?Yx?ppzc~+2!9V3mOG+?pJOR$e6+vT^eAGYLcuv;ILH>w;eV$E(1Z1MG(1$*qp zu45mD=(8KRnEP$!G-f+?^!)ayz@Akz-W4`^stLQ+@NJJ@<~p&Bhb%RdziY{M_@!;a z=51E&K#e5rkR%s2c*lgAb~Y{8p!H!_TSvEKT^&vto%6F~J6s&9dni)OZe$#$>ACB$ zx6>Ew8#v#ZU9o!Ps;16wy~cc-qW>y_V@)Ia-!oY0!s@jTx5{qTlFb~M=8=`?#^!C_ z-h0fj&a6%9{D))giF_Ci@ex^+649Yd}nU^e3q5wOt(9i>d8Kz@i^#=fft)Mc!uoK#y0F4oxBa1Q&9YdwVo8%yBa;s73d8loXWF{3 
zn+|m9k+j%}eKwe#uvp!Lbyr>cu0?ShRy$<=kn#bY*zTu7Die=)V9UZv4DBx3v+D!BOr$PRZ}f80#>yH|j= z&Fikg?b+$^8kMfEIAQ3EP!^tuy;a0nAVhi zr6pc{{!(W)@Wv5;b>hK39TVE(#KP9>lh%K>cru|Cdp=?6xqhjZtUSi>rB^>&_W9!U zlnbF>yt?(zxG_1p#w+p37^{kQ9oezJvZbQ^ZCDdg|0+Zx@wQ>$Q|{8j}aNV=dPjtGu@CpcqFT*q)(U(>7~zY}4SJ7suSYu)3~32J$~V z*oa2Ar}_GEYzJwlk=hZS>|?6~Av1j4+0FNMGm9>CU~PVDEfpuMEVUT6ig{=*W zpZCzKEo<@CZq}_&@Y6?Qr!5-NlAYt=GOEu5efCs)jaJ2*ioM*sIAzIw1K5$>Dt=%2 zsTKPowCAr=dbMC((*2(K>bGQ>ekNLChu-Y4tGCTv(t5Jpqwl8=5A8oE`vN>}=XLkF>QNL~))0(~Z(J=kwB|lcy&-+yG!j|mnyD1ZTC3~~R=CV_2{*Bn2?pIDk zo%Uugx4-9OdbSKu7v9CzNIHEZ_#H!CME)_3YC zlcs0w*h$j+eb^NaZ0p~BV%JvIc(v6kR{6b0Q}#hh=EPn*`?5N(BWhh^`?765-8-uB z)Q4SUQ#`eGj59lNA16dl{foO_h-a1Tm1XZ$u(=3DTUuT1$W#)Ho--Fy8qKNO@INzRYqWlroY35(i{5z**J5hdI^E;<9ziZ{$x3yg12WF4v zrCRPib8te&u3B#Ne);yc3v0O-flD`8OKUmlOXj@ipjs~3`eFJ)&swetzkxCa|7*F~ zGb*3oe))~N&^Y%YbKx8JAoD?uu}l`Q(kxjPjE|2h5}w>v2%W5@7sT;rQQ z`2UTw9#*r#$>AF(ElgOjPwyLd=9vHRgpwLgcctpAkh?Y9>x;~i_@g!4!lxtE_iU`; zd}mh=Z84{YvpG8s|7*AzJ@l{T46fnil_!}69O83bI_+yw!^sLWRW%K2xH{E9%>+YOd(uanq_D)m*dhmaUsCuIA>nyYCe@shU&&boBYNsA{fX zqc?jq`&M&TH1787-m#jyml5aPL0rvkw7b`~mv%Mhn78imjFPY1$n2acZytQ*8c9xD zOHO~~zP+E>rg7d^?r6lA(9g@hazmcF_J1|?D;IBFw3&(j%JrNbqvDs<`{2f$!{h zRdK@ou`2F+;e7oz)2q0o0=>!?k}7W3_~*KZhF5X2x3_EZyH|0qoK{Xb<6gzJd{)$> zT3p55%4UrF=~r=IEmti#URB9)9g8Ofy{hDHSh~(*Zd7uf-*F*ztdiSSI@a*Oj!JIT zET*Dxc_r5~f4ltUj7sjnqQb)t(n`*9Mi~BAa;6$-CrH0a?$OcR6P9+V!EKdkXVSz3dCuxZlwJ&7XbY%G4+Mj4JxVO=Mga?K%2|JNZGTWw-MS zH>cB|W4EpR!kycgmf3su7f%1o3L~xbFWetn-CsN${e_E9VNP5K`N9QQjF4XI`h}ZU z>vLM}{)IdIIADsS{TEJq;;Q&GlP{bkQ+K#bkT?R{;1%xlLlPtvaW(FEXwliwh;ScGR&=XOoz4OD!3}U z{$oCbRd5@6cAnlZsDg`ZsypPIZw2?baCSjRy9&-boEiVMSp}D~t8L64(+W<*d~j%{ zdIeYG+|oU=qMUneR#Y(Tbvd``*@u&(@0N3eM8CDpI$zE${Mzlz;e+K|;ZSC*`i^pL z+ofj3VXMozkApK^ug@>%Le&Q|AsOYI=h(n5AIFt*&w?f%TpCf%3HL+Gx!S3RUN!Gl z&b>D*>#p6Yoa?2&CFFana?Y&s**nd~<=p90Mo;Yw5e{VVznojy`%(CU(lV}-dYj*6 zFUz>zP7jaz-7Vu1@|w>qxKPGDU9kBPd!&pTb}T6Oc3v6RlS$n+6=F!e&T>l{>TkI2;ai;u+etO?!$wIz3?pMzBV57ad?|j?wF``&*#lbxj)~(9X#H$l*_*6%2?@_a-k}xTAgD` zxtKQVj!dg4;d))GUDx-03AZ$(S7CEm36~q_%;?+#oih7WpD*EzIu4y*b+m-@Zq4je z+grk^A1_^Gv$cd{HzkL&y{r^UniAt?Hw`;0+UNP z2j=#~JuxL*@uCcg-N+Jd_xOdAR}U!R7TNc}{}OIksKwI#Yzg<|@QSn$_Y$s^ajqTH zs)S3cZkoB@wuE~%uzNV8T;t63J2kErb3^n;%p_pUb zY69m)7IXRzDWjSUE9TDM`nKUf|6;E5WgUZ2-HSQ-F+R?e+&zUS-5aygg1>eblt4|2{Z zbm~Bj7jkY%z=AE}2XfAHP5Ti4>vFF1{lopnotJateCEvfy-?1bTiyJ_tNn6r-v^hZ zHhFUHcTtQ^=4Lq;`fJ|i2dm{=ur||)TP)|EMWyW7K3C376ZvX2ordtcf|hH4k#lFh zbkk{(Ea$3?(^u^qBj;+i7GLiZF6U;XB;|e@BInfH?|fO>U(TJ5G{4lNhny3s`QSg& zb(^~XjE9`F9%;L8p{tyG+j4wOq@$eEQd7zF7R$Mg6Z&_sw?uk=lXDsPB5XA+IlYg8 zO-Zb8C5~=O*U^-luztb$DR`2`AA7bWj8Y#`gvAGsRw~59oUTGZ|J(H&e%F8WCo5QZ zkKpG_3fBwoKNTtX^M3MQRG+_;ql{?v_w_udsNY=$WeR3Q|9yI8c0bb+5h@uf8J4g) z)m=fleCLNHr#X=05O2JEw-C>Ph4POzp-=Eg6u)7gY$`)s@DtQh`*{A*I@uzuqhhBr z&K|#*B>oLX!-A3!hn3A-28%KX<7Z-|VfOs1EU-JiHym-Ut`X6ru`Y`+t%r+WP_X@A zKOd+>{!<){to$*KE}asxq2aMT_+zzxildRDpW^%wXNfqz9(=hyXcY1OF&$q<_aEW{ z`7#0#2a_o395|RB8~S6q!8Bpak8yl{k{{xN`TWv;jN{W~ATDDD%D|86NW+jB9V>tk zwogrrlk&d+LdOstON#gj6DCB4!>mNhS?FJvBx!_w4QvlxnW9cUa0 zR3X_gs*9qz*`X>#M#~FV{MJr}3VCRXutbp{SB2Ezuu(z&KD-t4k)FZ+q~xRsd)Kx! 
zOW7YPB%`^CGLEk!jsI60W62Q9a6%XZh8Xw~Vui5w39QZI0Z!V{R(K9klbOYbUGeY} zpH7+H4*OfG5({RsijG7$MrMLETJTfk#i%hvjZzzV(qnYgh>p8H(Mi&fF!;h^tt2&x zYN(1UqoG1H+&~qEsJIHX<&T9u&_tqbs6h-PwTYp-7BLLcmCzsLR6+blTRe9lCj2<0 zY|#{B7(&M-4J+~u)TZ^Y!vQ#s=9dV)3c?~CC0}kJh%Z6bch@HRZW_3HQzvHb2Zw`MEs}noTeBI81l zL|dOBc<`gHF0)$W-Bm~k>O#qAsuE3K)j*{kR2WsFVoo#|l)~q&EQ6A-hbHQQy13~| zgmO`KurA8gAvSLH<*KNt5mn@)63R&Ue3X6jPqfk@R#I(Z6{$t6f;EX1%CK@XlNlMR zp*-|OgMK*G1@ATBfsqe%KE##`eNuxkqpd=;cdG<4jrB-lsV-?8sY4nEYm>(4&&F<6 zatoPRAtTZtBB>@3p&XH$o{TZpBF4cw#2956yBWyzB)XxDr4F%-)F+m~dc@LQmskdw z%8g|r3E$5_@E@grEz*!!HK9IZfV%1v!$rCh>d)S&o0SX7jm$;aYe{2f1>ti>|I+=1 z)W7X?h#lH(hj!bc-FB#tof|%Kn`#kL#F-+_6mh0*C|6yDsJk;nu!+zIN*_Cc_FsZ% zMY`WF}Yo{LK(TM&j0gQ$_me z&{s4FD{K+?@A4TEjK_f>%6zm?hsF?P*h;KHjHUQ(26sbZyjwR=IUn%813L<>`jLz= zJ;n&f;JE9$fmROM#4NZk#!(-Pqwa#OLj_yAs^D0@j}4Gsj~FK@`k41Sv>!fV1s_qg zUuI>mMaqG~il?D9kx(XkTp>@NT37a5Klc4&v4DYv!al-o% zd4AlOqAH9+ZnG1p%u5eL|DLSVvk@_!Iit6n$+PB;w~cYTt-dLRLbul$7>Jqh3Rac?!`11`vKG$JMP6TvXjIYNd9{)D*zWx&45K4WxL=()j_pA)3` zwJLtTFhCl8q(QwFi6ln+{G>&6f;8&-cylfx_Ui~)uk2$z6{44<9{6|tFcRs`LX>lg z5gl)ItnC)Yj

$kY?k0LJleN{CUpt+)PM+$X#WbHaK5MEpW~<$GO7{=MGb1!x$4Y znWFEcwEr~Ge;UNh%}BKuFqqO<{Ht4nO94#=Ygj<+Ya zjF|8<0nTeycJR?>`f^>Fc9CYGa#Hty_y1Ox*Q+_y_ z?x&wo_;YTOimMTSzQ8rAJFdxaJ*I5q;!k*d9a5|uZ&tXLLLIG=j3q{)qCmdv$TE~& zin8T(WlQjk<=^!`^9JXdw+;GVLzQT_W6nii@qL1^sW_(<3A36Gu4n(!2QtP$g&4T$ zxH6U+=tE8PA?RirYUQlo-d1W#8bz8A3$)v$oxyG$xt2_$NUcysVHbc1^OtZeHe zb%uO~=qzTC0%?9aPqO%I5DaOi+;5~sjNFZgQIb9$pV2PVlxT#~ykK_HScf4GAe|Ix z`{2A61yP3o&Nh>gcMha(Z18igCV$;-<*0=|HYL_*mo?gDT_UQxh7j`lvEPQ{Gf^~T zsG@xSb4~c)_sK$}`v6H&wyTj2?59T>VXQZDvyhv~Op1&O4eNYjEojM^gKYvmv-I!r0mysO&QYEjv*VA^`&#CyE&ale;kKq z7~|naazmMZk#3>3L@QJ?P{S2|X{1ez!he{D_|G^DZ8zpb3)soR%|wpp@Qd^dbtP7< zwJ{cIiLP8*_Vc_Y!?<9~wTU_7PYmVon+D}H)xv#%n|=iMe@Pr(>X3C-Ax&YUhGE7; zgP7o2-UQ>+#LZCh59a}yaQ$b9@r=(AyHO_x-GNpEOc^8cLpyNwBG@qvQDeEYg8Ab8kVKq0j4PF_gi7j>dWY=P^|j z%8>hzgG%31e5O@-^Rn|Dp6AX=M&m4SH4}UL?=dtZ!6zVQrt6v9n@h5ZT^~)o{Ml!cjNo2 zP}#?|)9_hhIzw*22Phdj4i>4oTD8DufEZkhFii+X5@uI*S2eVcmRCf}qw}*dK0E8e z)-mct^8_MkJoOgNZl|sl8fy(<&!dU|kLF1m8a=QXOANND)y*1?~^@pCTOs$Nq=a>K?t!oc~_Z9ic z6;$&R;w=?)Q!qfma0N3I%u#TQf~ORGtYDdfI<#Kc>=pD>u&;ul3fd{^HB}MLRxnq= zdgy$=mtKe=0&nS3Z!N&^96(rq+_L(c_q#&zch=K_UPE#;P!CVFRDtJo4 z+X~7Q{Gec^f(G4%avc@)RdBe12?}N?n4{n}1rIBDPQkkhzEQA7L34$jlpag(5jWiC zsSb!q96u;FAugn{K6#orTQrBqN^F?A&u*um^d~?5|EM*A&pH;=$atK57Cq7bWccZ zk~AeOZeYTAl#DxK>H`yE!V)6mq9VJdMc^mrksO6e>CetjQ6CT`{m0#6zCI~QNl^*X zz@)^8DBRXwqaGZU6d#)qCXG@?k-4fpk`;l(q=2X}TGzm&sAL>Qg4Ft_L?xvMMkPfj zCdG#(M4%NZXhsrg`q$ihM3UwWVuNFnqQW8(NlIz#fb?W(RD5tOY?Vx{Mm3VC+9M%W z8XFcDJ0WU7@7QE%cN{{dqelKw;VGj>M$ut{q&65Q!r%LG%|*!?;Vyb?V6O72oo{_FzAM;B)=q7C?YIwKos9K zgq%|C9Tk=u^_O^4#y=`9O8S=_V;i*Tr-OVFA_quf6L=eveD&bCWS_XW#0WxmQytX{ z35!dKB70~El@mgL&`>~9R213EQ1p%sPYQ#(6wvn2j`N9(M0eq?)&w zj~KO}u-N3NI?o~G3h_g?NO{K{L^{zt{F7o+qk_{VQG-Z-8m7HNO^gnw-sYPwm=~)N z%Dpr(nMM5K5|g78I~~*lqoT&rp%57ui@ht=>pMI+F?hlNNmN8^bZkWLMA(+^`(#Gx z2EmE-X$g5qOBo=IRF>LYV-UK8uP~WJ_x(8X(8|$F*RyBV@r_;t7Kz{SxEh z#IR5T2CVF-m_a0hZ(mAWTws_qhBgIx_g95WN`!72$m@JpkTnFENy3t%21t`)6GkIt zfogBRK4Fr&@H_QEVX@MH#H0b(9~adxd<;B_1kf^Qb^Q_}>&M_5MXxBvA#&@@*9AS{ zBSit>DblE9z8%&y-cK0S{;^5OGBGI~=_^$Gq{K;Mebc2;!HI)nBcuFc!ji}^>f;S; zITLLmg5#2-G)7@Q+GF?~KPDh9B{_yR#5W~6Iw}bxM4P5k%+dW)q>>aV zt-X4KSU6V{A?a#Cn26}1grxGia$?6i5lu~Z4Zx&@d8Hk#B@G8A#?lD{QN&UeX{eVE zO-yJwmhc8H??d!Zj3-_XiW0obUhwm}f}YmTUJXx6|6_D|#_a3Ur>nBU1CyhY5-_O5 zVzH;Vc{2ZX3!|ioiE;FvJQ8(HixU65Ul|`6n}oyaj~k{)k9g;f4dNXTKWvasP~Q<^ zzpw;xLZVbWI!Y>z5XOsmd}7jA3`Q{y69x{=Uon`(7)^EcqWJ_4!8&iCPcXeF=hQIV zpu7=3$2&&V`*B#DI91`Xd`*(0q%BdPBwZSlm@r<9Lx~+dUI@2s*uMJs;fmxgQEBiP zdXOT$Tf_7%JKGUI&*9X>$-|L`Zo_>f5`V1Z59g;`BT7KA4nSP zx#Bu2X>9-N%58WyS(EWn; z&wm;5Kkxfb_NX9VTCe}FH=6H{pKAX1OZYh-`h8>IQThsvQ||LV zFFY@&w2P87O+%fA|JAl-kc#)eO8?)z|1WhP@%QyDyeQbi`?7+^1^(sue-q&yBIKX1 zu#4ZnN&lbk7M|q%yT3x-${F*o{Ljn&KOu*(@CZ!lqhn&njE##=NR*7jC3i~d__Xv1 zLhXEf{rtNIbnD)uXRqFU`t}R#A2c9%V921sL-^v@k5dxw$r(JiApP+T_cw@z z-GBL;St~?&E4c4}F};r>{hvsGb^VL=*5u{-H|yyqh#$AQ{3oyDkLa(CPyhEv-WQWm z_s86K`!O*>nK$k}mQ~^VnkEZJ`d;~QK3=An$8C~?`Sw5M6vehmk*`>h@Bdr6pzc&^ z8=Qmx(zySD5zN2YRDRl26`QiDfdlX`-hZ$K`u~T=d zoX8b)9i_NVQBZkZsBGoWVd~FxQ(o&)rYuvY{=fYH-2x)q-+;K{Ieu4gHN*pYJ~$Zf zeRPK&3Z8)shF$~?GsZasx&%zO#{FyP+29QbeIGyuc5Ec*9$*|~2jVGN8$6Q(-5VSL znFc)=ltU&$ryS9kkR0ft;5A4tbh-{(458`EzGUy%2m1IR{+^F0mEL zT?TG}&|@i&EA&&KH%@$)5KkEd;p+p=Q|Q^?JqWG;LvXa6pvQou86kQu_{0U;LueYx zJ_WhN=a5DW z<~w6O9C{&mmkM+l_!ROGx*RNq$e=sn>vjp$P8D=Lu&qM(2X{kuAfH0;6J#TFIXGmzkWVDI z07COw2Ht>}w!?V@G{oAE2Xqlwm<}JK`GB`4z~<23fOfxNPJ`YX%z@m5z6HDp;p+xI zROpme6NPf^!AONpc??3+-vqTMDcS-$L58Calp7|)-=J><8)XQ(4Ok1Ia|_AD?`1%! 
zZKYtyR3Uya82c;Eoj5iQteA!|480QUGXvubdLX!eCVUrq0k|?tICd2{b(Ww{1J!2Z ze1SBSts!(wIe`NddJK3LLi_6+*kq2-7Duqvd>oJGuAIPYIha48-voOu7JMTVd<5}8 zyxtO=LzfcL1-b-mf@{Cw(8VANq2+pmGa=M&MPQTVLfgflH-yGhhAMQ*TM$~eyP(+$ zjAi6$34U5BoG;`cyGlso4Nid2d?tc}SHoA4o-!9g^PzmD(BFVAYlL`LFcU)iVjB1o zLg(Q(;HkAjK9r`pLK@1hkTm4q9h?QB`OgKt*1@+B?+xCE6heOp9$t?;p;KCIfImRD z0hd5%SEg!}@T2z?@W0P+UgfvBK&X8v+wT+F zr11`UXeu6H#jkyRy{bw*ZCL z%rx*OWFq2u-a{LpUjk>`7yK;?{05=eM?*U&0pS{MV2lN-40sGY8Jl+X&ILLj2P0$yC;9Lmr1K=G9t0f|>--U5R zxdlSU1!X=~Wt@kacgto;MjD%z%Ps**37tnWr_aXHB zUIRwsy@oA_j{$E&v^?Qg;6puzm_nzVqtEc?@wuQO-Zh|QiRe2A5Lcv;g2a#^!=Y0y zgakv+0W(Y)at=D>R5QG*0DT&`+Fa0cK@r|Z(8Kfcl+}<7q^SX)G-60TbQ$Q_gdy2T zLmAMNA!*RNgJE{41N2DnU<-zXLO%>TI0*H01d}1uKc(PK2)*{s2V1m6+i9NQ0|>S4 zL$G6Op}h^C-KWp=i`&4~NKY9H(dz=g1@nFIEHUCKJNgR!;sN%C&^#$!{TO0{^psm6 zw2yXx5&l9wBf*mpn*J1+*%f|;^fmzu84IEDaiCQb)4^4_ynK z459gFfC+u^UJBwR;G=%98*~{sC5Rz=p=W?T1BCSc;CIMP#HR%_?|M zGoVw>ibq+{=Yn<#C=a?l*cn3G-UYmo$PjxL;{w#jbNDnKZ`hI95IPoeK=pA9@kSag za4UrR{|+!VNl23gj!(vzM4B|vNGkY=Jva)IgZNOeeJVp%LH7VxLTEp(0uL*6O0Dri zJmp}8E>2^}#Yqe)z_H>f3>lrlkWoXYUmy@$b8 zzY6)ggC*1956Gts95Msr9N#@L95kOP)ZY^1vQQ_)hk`{AN9dGtNH*p!${DjT1`wYM z=0m)p%R!6T4CxNt5^O#Xwu0^mhCpcD27@UOYKJti3=)YnlqU0qcvG-Bgr?5`FGJ=c z4OzgDmXLhta&Uh(>_hcMXct66=ds_=7DyU&`uz>`yBXA=Q_}B7aDq-rzhj^PIwgG% zmZqVk@2yjvlDXh`IYN}JxcW5gh(ONAiK= z!4I1dANUXPq5lw%)7(#+kWa=x#ACYsX%o`t{zH5|;^}t@Wi6>cET+3Ean!0mAUjZv zIQrJ_FIc2TwluE~hZZ0#st;?OS0h>4_2KO6YIr}OK3sVl;rbw+?dn5(NDur*OAjLa zR!f7OP-g}9?WV6|tpioRM|b}qYGW!|w1b8xZHXpCOnTrM+O$<4j-=<f`6J=Am4^+W3&5BBMKV3gJ$1A2C9HZ7({%dx&$yG3^9zW!vIXzDVnxb7`C8SgCj zZn)oStM{#%1n@g=uLMh^||+H6Q%QZluv zsX>oLp;1v$MeoZ;zAc|{)wxX@?s~$0&QPY8;ga#`!-pM5cd55}b1<+e`BT)`Q`%i6 zCZPY`_z4rf<_+9_D{*UZtIxZFTm8~&(;_V^ui>L#E!3NGWSaN6ny2iSxcwg|_iQQk zY<%luYQLR3c62#<`0&InciwyHg*GWHLCyzV)LXx4KL9xsEk%$Ip_dHbhqaGzb(?W66!%a@9VMx>;eeN3$`xp3iv2@Emw;G<99 zb!Eu%!-=u2hWq3#*J0!>X!maGZ~Afhnl-hNJ431?kBpp>xNqocQ{Nq9uaBDZ=aU^` z%d?eZ8{1B|ElBw?Y{ZDu*RNkcoqA|eT5Rmw1)9GeKS^9JUAdClabV%pUAkr}GX2ed zGYu~H?%Q{=-=2FlpBv3P({1~$cSjv(-;Vq`FQ$5)Yh}Rn!U>nSPpig-9Cr-M-52ot zjpHRHC4X!mls9?TqlE7+8B3!_kAB@D_?#lc^;f6VmR9did{TRU!jalv=LJ0XdYN

euj@av~OR&;c|4TruDotm;0M) zhjuwRs^63;&2o;N4Y*WW9ql zkEEi#+0k4*n#DDXW0j{vw%_`?FnUiF=f3jtc>n&Uha$$dt=;GPZDoALf(`rBPVO75 z;&Mma!`P`!n=h|ky=pRS=+Lh>LtQKP+`D(L;(f6}V#oU{oZo6}a9ES}EU)9r%WD%l zth9AmdU4?X??Wx8+^h&#u(RvR_Qj(XydJLGK@{ECKtpx-kqIfWlPA~ip@y#7`Q+=) zQHMACn!ng8B6*nxw(P_Qr%s)^iu%~@xHZNk@<6odo>AM+Jke`5(vX|nOzL%EuhE>) zRe^6(4|VC-ar`)#=R5T8>8Ys|i!UGX@tLo~Nc2x#dUoLTn(99SHWatp8S>ts<))iM z4cmSV*ju|Y?8<_O3mx}3y&s~wp~BhOS%25?10`?ZNO==$p4)!E(Q)^~M6*_Eg)QEW z9aVk5^NP!_u6DGZ_Zi*YK6LPnQFHE1Ir66bPnE}6NqMggroFDHo^p6L+ z^vI2QrS7{ppR~BfV`U3d4}H_Mt-gEqsCn6leB(K+KUfsR>WS{bTaVir!Y>{~y;I z=N|I(USA%#{L-JJlE>c*v>M#bQqNV?RpnOCxkEQin}Gj2=LYPY8)T#Z==r|E?8@V6 zC$iVww$|0veRSZ!fhVKjSugudd}eaCLru~Cre-RkdLs%XJ}xdUlc#J7s#;n;;ai8M z)3o~3zTaBaafQ$C?IXq=m~%SSqSxwyPU9z_?cOIVf1Q-{XO|ZjR0C^owENX4Yue$h ztD9I}Ke}n~`XOyL49p8(-~YJ(9*w3YCq|c=pNMv5zD!zXMTb~I!l&YwFJDeMc<|s? zN!!SKG3b6R&tEFyHr;qzBRw+myTjt1QTIRoKQvv1Ta?|?-dz@0dI{+U3F(lOM!Hcz zy1TnsLJ+09Lj+VnkZzV%y1Nk&>5c{V+xNS!-+yqP=gizQbI(0<#{FhZct(UB#lJL= zuMc(-zrX9pC(URzFffz}Dm_v6Wdt%rAW|U-@fOV^t`U2O#@bW0rGqc22rWpLXf5pr zVN0C34g4%dZ`IUi_$0IyspIM9&Fg&XGoB@xom>Sr2j88eu*~#v(X|PI*)PrfAMj9j zTu!Nq$Nn`FqKv2GN>_PS{uXv7iL`BaQqZxk=B)RyDUNTcpyBjG0O9~d8L_c(yo@rC zmmNt1@;v>7FG|Tvl^7GyEh!j2RwnYuE6C+O1Vv9#s_+q&@@Zj+|7FGQ=qx>Kf;r#vc^4?ClSt1)Xe0)aHJ=)P}oEkG?!`sPhpTd*+TXCf!6?O6Km~Uf4I( z-`Pb)h4Y1&c082(0%;@yFY`U9k(Q`mHYSqL(IusFQEhWxt+-vkG~`G}wwK}VFvcsz z8HqD~sL_jaGBNM?e5T^rOa-6g4$+Gp@=_6ksYyzsmRj_%3qf)=@1PLxAhi0nkV@|$ zgZ*o+?K(dy+3=zUp@;~I*HIX3NJ4&q=kpa+4_H| zyM7wEw)F-AP#sL9L)JjI)=_^k5u1UZL2}*=*}#t!aS8KrK;Pr$+3HDu&RJa=@6kCe@%kTEH9QHFR_HTxr(jxgk5h~X1P_^*Ml8dy%D2`kS^3!QE#3VQy z^m)m@=F#(Jz;t5mCiJR4nk*YFbcBFXtM0WHg^dcgqhN%R>>|z0%_3PSpr}ScAMzo$ z>n>N*`7eW;N8mCJz_x6AFfr`a$IoUzW*z-h*<-U6`5#4qHQjr-Tx1z~?z=spF_YGh zt~@`N3Z!KN<9VWBNuX3>AGMb+>}M;!zP0?5oy$AaWfs$;I~VoqTtsF}1HzeAI`-m6 zV_S3UOt?zi?)(x2H20zgo4mm@PC~rA#%-RYjFqxXsrsku;3G8(3inZtIL^((gFGCR zdOF(^en@I!>!?Bp2u#Nkv)Y!h-TTctUYY2q{CSB~=nb|ymZ~3fPI{Q$+dcjFKENA+ z4V%1Hm{zs=hZ7#y5V!wZtjy?T{l8~kCD88in5*cX;Q2`ZKmQDh%{^c{>9o6qm6$$tX5bx4Y>9E z%T)^K!Y8Wx0Et~G_|5@uX^f(IbsJg69roC&#E^|LOSz0mvIln*es74!Kwqxc@$rB|i5uGsie z)C*;WnQ+nxV%fkd9_Hf3uNHgWIBm8$ADD)3j{_+1PKJ_xpasm0 z7=h=dHIfKR4|-YApGVjQfw6@a(BVQ}W z&rdm>y@s+ytF9L-D=T)(N7tIGe(NEF!Y2B{A{|Fy_&441P9WS370(hOPB)2Gq2|Fl z>IDif+y6UPKk*)4SG{GH2_#97r7MkaOQh$RCUrq#TDD53mM>(!S@)=ryT>!m?vmOm zf#a<8sMFKRtTJ^1*B*LGzSc_CXX{LL4MyYV>-NKi#LPJjdGj5!$tmxGa}? 
zY@~2Jbg=;PdIfLw#fBMH>4kA8Y&#a*2~+LS#fV9TT;djlp5ZR7hT|P5ms~NGT;-06 zU7zIithsP`xgje_HpdNsOn^>Y&7C`E5`)BsoA>}3jv9ryjhm=m64& zU6T}u`eNi@G4hvn-o1uF7ldYe!X>5`=Dr71X@0KQr`gSJ56wPkgTWI|q^H zdQMJ@54#>2vyfbGzmQ^?$YTGOL{Baz<%VUzNe=~&@f#dhZq2j=Rq`Sjh-|3JH6{mm?%qMHNIeY5Ai zk)FOj(h8N+kpq5a?3xW<(v{1MnRR}weicB?IzCEeE%h;Okxn^j_6gyqzMauA{WZff z37WAFghjs$g*?0;zfmnQ)pg2&)4Gfo90)~oMZTF$AQo0sUhV z0Z0GU3fgU&I@td2+%3FhkZ`b=HcU?|_9r&}T0I(F=*iJShm~V5>5AbGX(+843I;;$ ze0;9Tk5R=#6DQkk20AdiC2)VLGz?YSKSe_s#{@!~@hcN=_a^`7kBx5@4<>A0zDGwp zSH?2fwX+aJKA8wK`>f**dEi3d!N+(z>=LCfVqiHKui)j$Ape|E7?~L0vL5qRr$*CZ z!3)se-D=y_`IJ~}aq`|<>cQ*qBAEki%JcRDFn&-BJ>R2m=YcARD^!6>$@Rr6o+M!u z3!g4a4%QAaN}Il*!$-DC zGfnT44XdsXJg9|f{$|cU1pEmb>4?r?9Wx&2O{D9;yK6kEw1S2@sD~21UTAW{XgVpB z5lz^@`1dO!RJ4c(yqeTI2CJ-%vXM8iJ&#K-plapZd^|_2^x6E#l`>8S(NCO=y@RAJ zvwH5eTveJ77m-m+KR+OC=I-l|iO3u`9>~VIfN8|-He~>u(9@aCOKbf0NR9)L`OP-S z1U$5sVsYLViqh=wRZ%u$YHa)q3!?EdlK`9oZD zrdjLNcC~SjnZ!Mad(4mii%lqni~uRN+I|=v6=M2Emfo#eQ3JkK)3QE*Btp zcz;DjMYI3emR!EZp5jT{NoT5@aBH`NKl30}9!8`L4qS84Be+e+vGQA&>8(A=lygIZ2kL;4FS{GlTEVnqBCF0IV| zCxlDtGIWs8bx9)LwK{PaN7bBYqR*EdFf{=%6%|gz#HS0V?-@n--r9*rNg=rZ99X?E z1te3*6}@?3VQIP5tDN%ih)PqD58aWNFV|!*Gl&2qoMxeSJ9Jfg73<3K*=kZvv{SZQ z11R|2#)&B$%vSvb)sZzSG);+O6mZn6(Cb!jE7Ph3g&M)o*%Bij2!z17|wsj%SQsi1-krvhBv+cuNt4OyD3!Oa{ZVBxuPvg z|0-i0{{~gjdp0dUp4HinOJZy!M!R~VO&ei=>&uXss5Eze$1}h$>repu5mN#2RHCEE zGs?B;Wfq7(I6k%q=z>6rNM0xL_jHFW|8y#I;J^DNr~y-prW38w2;CnEBB*fyZ_t(l z3T)YI*<6BsY;HJ4uSJsVz4Djs5_*jXyRWWW<(&>)`xm5K?$^hg^!d6{ZudAwNqN;W z%U;<&z?}nvz#u@k=5ZdgA#K@`*77h_6ZWRDxKdZVp=zRdUu;5O@AG2bnW z+rBklyQJ&?D>>}~;T*<>T@Sr@I_FS4?fHhfupVg>mTEH@X>6v9Jxq$HeK^$-U%<&=9RB#xyC-J8)vs3L38eKl z6kD)I3I$NjCdPGHmqVpo4$njp#Xu7!IYiLb{CuyPKs_v4yHw*AAPs*1D4>q)MA+qQ zSb32zgx6h+oLndW;;$~!0*I6eU-4AZlk}Og5qZ!KH=^55}b{b-gxfdN(U7ze={r7f=)Mn<*t}t zWUM>z%Z6Vw zR22}p*IE3XJ{h{B&h_>~mF!H2X34n4ldR?Jg-u^JK)uF1p#i$DKqTt|Szl*%iqTbl z#oL~<7D>Pme<@6;IHPzh*~`pAePa$Tu#F{I9Rc*5=~=pboB?qM<&m<2gAM?b9ZU7zjpFDFd?|~Iv9y7Pb}z`OQeyLnuKcOGH$-?f`TYP zY9NFyOO2Bev8}a~6N~t+-@&1Ir=XYSvRxourfBNvc7{Vlyh+nj)BHn$ltME|T7lP} z0gf%Ku8CV5BCI}t+SAYdLEbj(``KbALt+!4-XrA$Kp9MQn7#fEWXk66$7rV=RrWEp zn3i0y&CkVY>)aA&kK62Oy@#{veHj*Il>Br5jZ;E3bs>*=8pM19c;Gb{kjkUQo%tHY zc_*Oi$am}O z{7EV`eoNyX|AyQAS`(f&KuQDMrhtsQ{A(cujX?Pv3}6=(4#h1-+r_MWFTDm*;zL8r z{+<^Eh?)%FFv+U1`u_gn!sV}g(9t-vnm_4~q9gYh&t`b9L?+&^?Yim#UY+6fasvG*2lrZqPQ~cKFEWb&e&;cEJ>j>tkowI ze4d&3DyF2onp`*Ummo7kk@L1C(S)q!@*6aPsuv`>un4rA-%Q7`ueXTtm~;5Fjxm`( z@=e6a3)~x)RFk8X@+mDm3ErbAm^t8*FlonyT8;@I1n*-z1_}fwY?n>8!;TjXt=8XA zq>IR6*r1gVDJ}?W5719O6@;Xl_$|kVbt;%;Q$*dXpbw_AceRbJPIv0x#`Vp0!xdQ7 z`WSCLZUrXM`{5(%rn80%aqDlc&5#3grRtSbx_djg+gt$^X(FR|l2k-w*oq$3~&sbB0 ze(*TUr6$Q*4qDU1ULhDjA(W1$mNktm)*2UeP7@>MG=_lU0fJ16<1k59`a+2HI?n>8 z5PKtLQ^Uil6+<#T^@{f3PYy5oWQ7m9FmCjtl~BwFtS>n$E;xQ81*~2N@-XNeB_KSi z7hP7hCrw=ZPu4f-xi}YMyu6lnd$myhVtp2%4PP#a8ak~=FgVCpZL@^#wsm7Q+6#Q{ zD*G3^`6{r(bl+PH^$OYVLZFDmn`~joYR!}r>Po*fJpB|&xOq=!&-C=vy#8CC2bI<) zelD~x5tY_kriwq|M+!m&i1E~3awse_9eI}sCM{e=QN((B_#rNG)*Vmena&LJbUEN- zlk@s|Rv1gKCdC?9y!|_a6z*S@zb39q=9OcAFYc=jM=*6-j+`Wz5NoO2*|AC474hG^ zU2<}~sH_yanF}8?WLUG*qUN0TJK!)Ph;5Nnp7PTkDLXtapBmGcu6j{s zyfm3!`5L~Pdxe2T>whWQ?s+`4AjJFe{AlPM5YC3fg!GduAUz`s&XOL3o(b%P^s9H$VCnyE)fF?p#T1tFML$9tUdZJfj3t zt@VB?nWUKX*S9ct?Qk^+ugOg^pjlaCS_x*pjdUWk1cAF9E*YjfM0i88!uMmv#>Tm* z;t7D!jmI!z5Vvj;iYof#zriuS8Pm1bsb)m+x@4ta3xZ;}JW&;sOw|?L7DfZeRq1y_ zZ7q>g74k=#=eo#+3w3WShucrL=en%%7(khyGHLk=9;D%n;VPw3-xT`oL!+_- zP7MXB<2y*zWRYe;NU7Nr(RkOJ@|#)Fql6hv-=!R%q{l(6AyBN!_O%HfgkgXJ0F~Xi z&c|ab6_9N99iNnmF79YLCyi;qUlA8k?KZ;t)}I08CHIOGWx?f z?wbPoj%QvdQ}pD$H7Ce-E>GfmqeSsEoi!o84>tF?UAo2qK99rDiI^ysa}sXnQW4Jk 
zuT()1dM8lX;E3!J?J}foGLi#F)&I6)utsbmDLD$Xh%4oQdB#f!U;$#uR+zqKPbl(+ z)|E*ww>C@V%{6U`e|ZWjGwZ~!mozq(Qi6Eufn*PRUa=qCV4h@!Sk!4@t+fR48<8>n z+shAPIb+o{i%PQiA^n1%t$Bj=!aYz;my!<>5?K6OLv^eX38s&gi74uZ9x~FWnk)TJ z3xGx{KHE|Xs0YMAB1Y@XZN9mP(EKN5ZIy61NaYJu@fVsNCFbSk${MI@N{^Ng} z-ALlmz#Qo+-;BA#s&)Q#ftoj?NxC+{05#6Kgdze${Bjq$QfLhdpP*y>x z0QDj>o^(&#zY1w6Jq(=Fz)$2dYLc9OOF8Ye4W&&#|B=dQ(RD)`2^ znc!(H979eIK=0MZd9t7<(?CQ(^SfG~y4feVhV=+eD)0 zJ?2~+C%5}Jw^PJCMm6kI8kQ*&vh)^YIz)Pav?2f-d8(AfJa90lYfR18qN@Gfp)tT| zO{`87d_rVX85J&k60T;iR-XSp50M;C|3#{l;>BdMS0;rxkJ}R>uvUt(C%-M%mYh3L zlb7V+va`|nn=<~kkh`Je9_g2pw1}u!Jz&i@KCPj{o4u&3^j0EecQ+do^iqm=M=W`m z<+-b|W#$qK)?E#E?#cCreu6UuYRpqouUGMQXy^Rq)$K;)vZ)gyv}8Z}zmg~RuOvfX zKAcyqZ8FWQNi79T@amp%%-FCVi@C8l=>qZkKGMOUK?Uor(-R`P+JdSz9uX5SDSzMv z35i}5y)3DkfIJ@Nb)97CI9+o`+PZ`Rx5c}Qp5Y(c|2Uv+2^4QG!75F+yRH{5(zu`z zeBk3ra;9jA?#DEzSVk@3Z?Chuh0eQ=NRoSu$OB}%KZkU_ZXhYvV`s2PjoB3J^knZr zT^WPhVG6Mw&t`Y;c)az!hl{6>#4$^LNkBKC=~VkpcmaNfVSk7gQv zq*d1`w_}Be$)0x)+&P45yvBEN5in zquCE81kY)HRw+Z8-S_nuW3E7$YVrn6cfF= zn&%l@g)_4A8U7_PEy)|9YB@em14v$5JB&xt7ktueqscPUWU3+U|Ko^BddKr_`!O_( z0G6p}+K#>-{S%Opk?}?6pE&RkICnifko{!QdUC;2*uPn>Nlu*oD=V7xr7z{Weag&( zz8YKqlp~|nF`_1O%<)ofD?znbT?dE$Di*EkgcG>8_kz}wRTh_iQmif?4-;eW8`$qC zm%j)5TahLH0eZF-eUAyEtf(H_I7yngVsr0{tD5;S=7Pe9_agd(<-Bc|oAmt)@6bci z)mc=kHFduc+o+vEi7md&NkhPnjqA&0&Qho=^S_(9^Dzl;SjvHd(p7de*moXVvA;Qb!xt655y+5UM(q zusgg_(8W%uMF%v*<7NRs+lQs-!DnZC9;fghcvj3-l>oxQvnCWnjKqYmuzbH)FQMuI zncHfkgg~$5{h|=_Q`eh;riiXU3;dWuQJV+-n%44Bx81b-gc=6bV6X2$4xjXIR0s7n-?V*WIlaDOQnGF zc2uL$)BLq#LNi?U$;XxmGj0B%YaA7+A!7kb_k=rsNGCr~MISU?Rc7b3-xtFrtjFUE zRE6)N*^`n=3oa8#J%PAKx$yA_CBLi3JBGGMa3bJzd80L?sjSsu2QB5{eLpVR<0EbZWaQ#b|{=B zox@smbfYe~z3}_ucFStqZfV%i#nBjninT$&U<68_1vPh6mC7V@{aH zFa5~oWtjBKDmAeFh)z03re`LWB&`)4pDl_|X;Voy2koX;Qgfeo#z{7!(n9f-2D%I- z13eFC*F)c@+X6SlsTTfkHd_MX=$?=rPl{5V{WX=+^JTzpjFY$1WJVJp>s`k1P8(q5 z-*wgO^f=eW0r$a4GHb$Sh2j`F>_b&_49>H~XdsXj=-CqwqFf`P1+jdr>6q*J zl4UVk{n#-@erGT$rgMO1xYtnUL!35%>FSDCj0Et0$51>Xmfb*WQp@?)s`w6AvI5OF zgnWFvYeK?taghPjxLFMT5U#>63LMIPTzmlb=NV%E2weoGBy=x^$_-wA6DKporUWGh zU$Nm!1qg0q#5rn~{A&1(GBT8Rn4x5H@vl5SO=ge*ST*vBOsjABN@qxTmoA5RXm%cv z%*yV!`^242mBY_8>-wBA=?|!CUw=~XeTnzIT#!FM^uaA_vF>ZfwJcNocH0-qCaK|t zZ4X#wwiGGa1f}0`!71E0T8Iq@i4ylymp-um&?5KBlAKP96cc9W?LzG5T_)KRjAEB> zZRp%n_{-A1V*(RNXmY4p?<#2NqIIa^YpRK=mI%Z6sQx5c{~J%3*jH2}N17R~2Y@Vx z_Z2U!%9jx3d{EL3nx9xG9!sz=X9Ag@;Md4ih)Ox@2dAR zZjQVrHGCHP-gND9k5G6L!o-|C-uFbX^5z?T?bdvYp~E_Gnomilh9d1HwR2PyRbF5G zTSz4f_Rzhit)mQr#Ay9n(C$6>OAEzru0!xnEZJGf`psyT{hZLsfIZ&k(GNGg+LwxNxTO18K0B&t`iBQrNPO4DIX*lweWS0FaZYph49qa(XF zR#m>yWasD3JdGidOBdzp+o1Lnw};T%vu)#^#80ek3R~w-ZD`w){+H`_`G<^vxjdhE zUaRx+;!gRtq;+@20zvEF1CEfe61C9GA5>3iJ&JDY_>{y?UU4UPQml~LE3VT?ao+`d z*j0H5T_j;=813N1yJEOlh*W94)L=P&-F0WzK~!3q?y{h{{+{Vq$I46MirIWBy9RM% ze1#2L1~_=-tHyQCU}30(v+RV(P!q~4T!HS((QYVBWd`Yg9>Io(5lP@wrCP#|oI5xq z9(c-J=J5abUZ5=8|7aWI6SUF8pk{m%kdVDu51BY-rG;FI0+y`5iz*1`&{1&h9uHDf ziAY9Qw>OUWuV&Xu<@4)7KpGiXn2im*Dtf!PcRvwRff)GCh44484c1NHx%_aZFbOzk z`2DKDBAaIMMjbuMEtZGo)Egx*M5N!bUnQfooGuQ+LBrT6VT)cx10G!b$LsBw0~Pk* zqn6bFI+FG~7)$PWGUT>CH1g8n-)I~~(KNWKS>*EH$sQe#j0;MT0_zS)cIJVge5(!U zgDS;(zMZ-Q&i6aPwbSzOn`5U4Yq4`p)9x5tDGmB4sT9!3qB!4uK&5VR(S@t@i}oq` zqM1>T2!?fsU+*x+3_X3e0MdnVr34dMBn`sJ8c()vFyA3v12zn{gCNAB;37NVM=&Y~~+K4-Q7<6kcv?~l?B*I`n+zKG$G5vyyE3^O`^Lnr; z2LqehQ;SH`2Qm#~6qi-mqN&Gmo5dS&A(g5^nRU(*Pu9GozJUS6BKF6Xwt3XC+g)z= z&Lqs*9WCFl*%Wdr?@01x!CnQ>+&?ZV0gZ~U);g+2;DrGKQeB;s+p>^$zY}HyCprZI zAN$9klVwa-yafeJ*-Cl>TgK^fSaxyIV~zd3G<~!vUgy@o)hy(vd`S$IAmjc$BkL7D z{1t$Kdr}o*FC}uM)6P^R9z}XsT>$4CBk?gmeEvFhaON`)Z;aF0;#I1Z+; 
zY&Cw?x&?F-YNVt-1!eZil3}5Q8d5eAe3nq$2j8r=^y1pK>aDuCn<+7akAwD5`9ur@ zfAiN)LhK)T*ByDM!;nZgr{B#*kG??t(ESq%c6^3uAEt9Wd`b(ysoL;S#Rz<{m?JsT zU-AWct>*(5u*`VIxVC=%)E8P^ll{0&F8iUa=KY;B>Uun=-E>usKuOt0eoSdifQ0;3 zh$qv7%7qmqRq*rg+dJnG7Z)0D+f@2LUY#Z*YG^q5w7!Bb*c5-POBb%b5K3#nj6=vPlU`Tf5w=y?4{ph~JstiWyr;u~xV7mf{#CR|$U znZT8XMS*{VWwSRo6KJJAAtb%$%iLkStkRHhwvTH+!;nw>uyh}uP-1&k6yTAfy6`N@ zDJTiO;qfLhOru>4lgPT^^WB6Rh`EXf5*;;(-NP?R7>S;{-`NfAbMIKyzmI#LF4-u| zu{JL_9+izfBIMXeiCI(I`|`_1ewOGm$6yoR5U)gJor90w#I3?NJhTqCFk+p~P@2gv zY5wa^%E4#eRC?Bi4?MaC2u9zrKMyxJ)-(nPHA&>Tq0##K{hc$oVEBHT*Gd3@%Qz5R z9+Mur~K$m;pfdReWMX7aU7yv?w zh6fI%GmWFTm-}-e)nZl2%vc(QX1pUjxYKgj5M_XJwc(quqXv-%1{E-wM#XHpl7ev5 zN7??{!NniwBR%kBX>s@>xjK($gh!j4fO^}}bD$=}FGIF@m|yY(ARm=&j!7rv+>D>i zO(akL0kph2QIWabs9HWfjYXH1{{|QabH~kH0c;0&-C_1GD0=%G?OL+#r@lcUQ&Ct- zF&1`K$)dxLa{LU2~Fu*6AQ7Fi)Yg`H56ppfT+-(9d7t{tj`r6`A~ zO5bz{zsA`X$4cebX0T{0QptQ$iQ|D;v-kWMQB9 zx~2w=%l8E41?z=br*FG1&PL%RV`1Ze$mt7}1a_&vY@{zYj;?A%bbnQV3$HeE z0>eu?Tz5(@1H=K)ow zyLF5u{%Z}dAHNHJ-cKshtk@`!aaUj)LFCZc<$_bg{n>$)sC>+{yGJKzn$&*5v@|XC z)2C0ELOzkgNsW~NrIbH_0r}E7B8&i>jdU;l3DbQ!?CIZw!QQpp;mP+3;S>ijSdolT z{^xi-zF6)b;Y7RQXauAzl3u*(p_{J&129{glXSFmkp_}H_!-h5+cW90<)Y}J=s6`k z249UKg|W9@!^-v_gnTMt?N&xlq4}UUNI_>qPFNGuh7kv0b8@tv8}T7L(YA)naZG~W z#7{T8n-}-sI2{{w9pTrxT6g140b9bMLZTrOr0E5+agupmV24H3Q!Y3aYb?c??+|si^7(>hHN3>l4Zk4zvDZE8hpR3LYSK82P+%bd& zr0g&%DZa}|p|JWc{nl>XgNpJ>?8OfMmXb8~Ii(&kTlO_;jyp!LJ!j$~C8i-T=boG59c7k)x zX$9Gjja4&0bGaj~SG!6oSd2@xq;S1w0QGsK6m#OQ>PWO^m#UZCJoQH}dm1h4m?63T13*E5Td!lA*=}D2wXM{97C~P-VXu4n3ZgLa&A2T z6#@%w4zr}Tb=nnnZ^qj&wqR$NLNXQAaT6z1KP+uZBYC8JNRSW6K!`~KgLOZzIL>x? zw;_nyASSJm7y`8H>2Il^IHKhb6XCB}rsZah{NRgAv%pyY7G3%+`|K~(X%B8`rD7;* z^2zn5Rg$&TFi$B=v*=2OR}|#?N{rEp0s69rA9|ZEH+cHz+wg7n$vwTVYpz0rAsW+w zG?hYyf;2l6oZH7~CLnt8iY|WmcBkd&;mSfV1XZo*-xHtgyb5{>z9-YXq7b_o{Rfj9+#UpvaMjPXc4Rd49G@2+z_oD#P zMQjmi>VHKB%nJT)d_z8Z-S(d#ple&HwhMF8eh^0I@jhTVSmU3Iz5PB;H z=ba$iWv+aW2CbwrC$R~9%I&+9{(eS`SNTFgvvjb2?(a?DUtj&xnMvK-QYn83>z+Qt zziE00TR9e&C+NyAIZ`BHO#s~-e;TL~tVD(Wvu5F4VoOX7jBv2ltJaNS={u;XvVm4~ zaH*NlNbzdX)|S&p?`W_=1U79W4_I*~`GYR)c=3^hEEOa>1Lq$wc0-qX%I>`tr(23C zyuEIloKR02ekQLChRA~G`2=E$IAI1U;gqKskVvgzuMcHRR!=ccbzOqsd-LzzM_v(f z3rQv7Pl2jvb`5M!yVc(X1sxNpgsYocKWdc6O9lQ_e%K8Q!Zt}`aMZQU5sv%|`hvS% zF=OZnV~GN5gQQNfMltCbIk~IBI~2ZXb6&v1CoXaR36#(B3Fs@{Ap(?NY23GVo*%ic zhpH_|H=ZtYCM9IjVPn<4NR84Q;B%FuU}lsaxb3Jm>6^BbOboj~i6iot%d{$H2?u`? z^aA1EG4y=&$RS@aAiU8rLcInQWdyyB>}V}3;q4qy$!O7M0NN`*QebIB->ztn&FjEo zkI}z2zAr-vdM_o?eN=-*^M0?JbiW_=x}EPrWQqg_)4us3*Y_6fyj2A{0HVkDRFO3& z=RIbaf&d`qjGTUYF-rz(H{*0vKiz)itiGsEN0)wXHtCdY!O-HAreknK5J8|3y^qx$ zGgf50O95$S_)wlqU-N-}Sp6?sPx776)0|r{z7!(gZ_{WUiBJ6es~!}~0yYQ>r2PBN zf*v3bazH&MuC5EXv+iQx$0eV^@syOrZ14ZpRcQAVr|EW+E4X}FwEKpwSt){DAc){a zWmyg4T&#MNViAY^U^#I}4>wy1d|x2xONrb<$zXTh0=09&zf-PGRzEHNq-GmsyL|rq z{Hs)@Ocpn~k_u`@1o1H1(6Mr#F$!K80@3?Y9Xz*)O7la%e`Jn-OaX>Q#jpN!0)U}E zH(y!Urai9)y~h6WQ;7HjB)SN@nYMVIXEW^X6WQs^H;(^zY@%7|4#Src-2;N;ctzE< z2#9kWF<9+oS%;0_&4fzEnJfn!3zNq?5AHF)fb-IhiE?bnea{B`=0{u_u^Q8Ff!ckp z8dDV*v09G$dV;Cjv-};Khq*$ji1oWEZLJpIy9%n{!FA2$8>Wi8Mf>30nSWOBM69D_ zP{0lxa39IcHOLYrJ@%5VDrhoS9tEsQwh2PLw{UxEdTnQ`4qiQ2{gb4&#+K1KDFBSX zaC24SRetKJP()+xE7C#ZWPDh}!SVk$D|g$XxfXx3(n)Y-NRzJOaF`d&kQ@@#7}}KB z?G^ayqTpaw&ULjMe=keQ4d^eU_-^#XhQa29&haTejf*F%K14CIocEuh-A`r}CJNk| z+Nth-j*Hv7aq6Dr3z`r2%<{%Qw)vUX3b&3hiw3EHUEx(xV{uqNsFG$krMGfcL@DK` z1^kN5KE^10ywSNEERc9Om4PJRW_yM~33ecsR`bntYsR&Xv)E&VH)E*vjJe`KacX$w zR9Ymz^?`puY5tEBIfw*3uph!>h6Bl9vFx^*Xe zDQ8!`URb-$gSdImH?ViXIvP-84)i8!s%-adG_h2qS0YV!7qV_}a)*B+E23y2GdAR| za$p{$HWbWSvGcpY`HqVHTCT;%D{j_3d*BYaeccO=YGihK3#1RX2y-m4fG2%cS~3j? 
zPdpNZSK0;cQSbo&sD%Z+z`+T^8sxNHz3l@rCICglpXdjwCEY=0lD}#s&mrPl#ypIwW<<-ItxRp&*S2;V(eO8{`7Sl5f*17d<@U$h~5 z;&6Txwqjl*@gSh3fhweryYrDq~R4Ou?oO|?)+}O)x>HI*( zXcja9AOe&+bpugD$ED8C8JMw05a^_JghO^A_YE2N7pfYUgN zlq@^v`=#!A5;rl|{Ya&rV%?-mX5|}W-E~9y}02SF0>NpabQU)-vtwrNxm8Ta!ht0~wqjx$XA( zZMfH&{a@ArDjFYoNq@OlS2m}hfbm05)!-j{vsRKaqPC2Vyw6r#5DDh1Z@7xLwPmOTDv*u-Hj+_cgMWt8`+Ol&E61XUefT9SU2DKZWN5Ci;JLcpK;~n>gSY6cYjO3 z!#Q0DR$^x@MB7DH{%z#c@^KZ6P5y$ybXDz%N7aQZU^Cc@Ei3=Y~T$ zuU8AjthkHYK|>p#OGDRh$ADh@idFy??D#8MZUcmJHYL8PJeI@Nn$GGN>wpeKg8SO$ z9(OShV0E#)7ad(`^BwSxDaF?Wz#s8dnBH*t)Ka{M09hV$x;R)kIQ#s7h%K`*^?c4Y z*uk&#Tk4x_aR%Pt>V^}ImS>G8WasT+__gN66}dIhb4)j#xb)iC9IMFGW6cp3%VKP#qSwvq#R%A36+6!OlbFPM> z*KBB@?mm%k&!eaH8UcDeDl~7qx4|-O#jKQXp`>UqHfMG{1dc!vm{UW#Pm*&_MyimM z)w#rnrGzK$NZ<7{rbTaw4#fwiON{3aQ*wi6mo4ZI+nVhJRkmoej?aFgL-Nl$2$|?@ zo+^=UpDa&fshRewZqIHSREO^Go=mA%Qgs<*ItzqY2FSsql#enI8uNe<8GcEpX`jr7(c9_WHh#64fzDE>^{y|y&{L=QIF<{|B>-zl(Qy*W zlIzp8o*9(?S9cBDeAm)4NnNM#!xFqkvnx4)VlC=<52`N#Y70-WTrL{X1{s}>M&WfN z(tX!q7zwCRwMbE0uo+E~7+iY0|4V>(8$XTdg9q*&sHQ zPV{93uyvT(%M!*0euYjM%;dZum3yLsdn@v&MRCsk$?vZ&1MhJ*Zq})3S4?_3N+W#D z`1Dv@z4a@Y&eD^L?M1dub6;3Kxfc_oyCpPODR*HT9c|godK+0OV_MPcjjgInq3ibn zKz0R$MghZr>$!YYj}I;+bP-%=Y7hS&rfZL4ni=|Pb(|v0OTaQ~nz@d%hc_CSd1@h2 zu=h4B2Z=aD6V~+@Si+~?@YiJrS(vf)vzLc3roo4l%2=$z3spvpej>I(|AzT)DMSz9 z1N^<{13c!qFUqx8hf8kgY_rG1aNWtiJUjeNi4D>@k8R=-Qo#3FtCJ~to6Crt$vp^Lr#4%i2eS7#SWld`=h1yq*yx1e1UK7%jleS?k)$`7m@fz zC*s9$=(iwEnY&-_M0_~``OzPyA+e$!wzZq6O#m35DZGL`MpM`1|I3;c<@GO6Dlchx z_HVJ3U!Ck$V6OBA^=m$>nU;yd+L%)FzeTe(Y0yK3hxhYo)^Cdi5Hf!@{TNfjAwUQr z#W$^lNxHPNY%4Dj&mF#p)Wi-dl$uw&dKT5Nj$R*qqWS$mU688N*%AFQkR;xDh-n4( z@UmDHdQITY9Zn1y!Pue0#V|PXMQup{BKMQ+&d6AI>2s!&w9(~z#uBWJ*>$J&dQ*e+ zua%@KIW~;B?G3b&s*EsFiD7--m^bF;_n=)bheW<8i>d5y&WfG!Fc{O)K}J5MGwR~l zcVxQ?ef!+&GjOEkJIUyiv$HIg-}L_2lX&MZx)GsG#QR&K&(>MOx3k@WImbEj8lI}o z&j%sSn{%fSrq#;$Gx$7;J}HeJOREK}U9}PGN`?H345F603SU0kj6_U+MSWx@1JnL6 z75|-5my|ByT75)OJXR?ZGxA$O$&ZXNY2II zPO=7iB@SH*FIwzzmc!y+t}mH_E58q~tV~NObZaR3`-6H>)eFGA&XA~irTl1=%{fVq zvv^r93(rdLqMl!>gLWkab39d&BL|f@DJ+&ZcaBFX>+CND?VaD(E;PdIO4K8AO(};$ z(u5nFy0ro%${)iWDfIO@ZuQM1Voy4PpCMOQPoz86r29cp2_=Ks57~Z-39I=IxvSKw z0Ww>@T2Bkj2O=*^lzUY%^4TtfD(ssw&%&evNWyNQ`z1P?ECOAccNMDZ?}wr-X&F_AE%Bfdk^UuactCGv1@-E{?m zkhjOn9>WPoU-g_msFdjvqESe@az!&!O4>=VTwi4w{n*2Z`XYKjBrT8@?A(cJF- ztSj^uQedDW{og#yTL8$=q2vn4>rhiGSDP9I!>DT{kN3bN_mwwudD;5%(FEM1&S49a zun9tQl-ddU2g=$U2?+%Y7)uhOp$6#|>b%6hPG1d{kT-Bev7t6cI5g;dGq#qi&t`)$ z?STXjYz`OadQ|Q9TuR*0WP5Oe1cu{ARhgDyMaA!=d5wyo^q!9o>bFB`+@LIm0WL4> zpp*7Ro+ygq0lqNnT~=5c4E%+pNCl_Wuz#3C^|p{sBDyOqi0Wpol>kLSXty|*KVbJn zX}oLk^F`#rAam8}d8M1#Vs<_?$kb=lV0vW|j7M6re4zr75d6ZxO%hUTyt4%Mq)J#e zJhnFDttH?sI)D;HL^;=#S=NOiD)0;V2^=(${bZX;%?)?niH5-lNnI-e7&1%l)ouE> z+yJYSG4^^&s)L~}1rpe{h-AzD$FN8g0>f??c>Es5A;&vQ!Yrc%di{SSopn@H z-TU^>3G|6sB|MpOGryMzw>=x*8Bm? 
zEEXsB-uJ%FbzdKM5?tjaaYPVWiTUwBwk?iwWf+)vL6>P}zkYMrMlJCb>>-+A${$Kw z_>KcnxoF1G2(0_$6rz2)9Fd0a-D9Z0sQ5wqE9vM2iT~kM^3hiEd>rWlB6$}bh5tR7 ziZsi9vw*cGIFR2w-iKaDRpjk(U=Ykxn}X4Mp-xTNYscP-1Ly~@ z$%E8S6jn;>b^h5!6l`p4>Q347%Vht%uel2BNymS+0p=nY{w+BCy|JG~A42Kz-Igmj z;o>&5=ytYAwXorE_T*=lh7nZq6PIBzsmiEN)nsbh8V1V(=AIHeWtQ*$)OWu1C+6>@ z{=)DXDUX?}9afh3&)f$`G3(jKEZ;4eHlG0M0%FpDu>1BCg}LU=xu(C;I~}X}w12D2 zcjL8jD~I@2mi_kOBR`&~NO;I3*v`BZMP&sV!f9xNn4LeTI8dNpe;MS4*rY6)3sWGE z)|8Fnh|nT8p=u0IQq%j#M4>@J4N#Odoc@mlzi}0nIg|5bX6lgeX3O|yCq|Z{Iq)_V zVJw2*^q)?kFq)imqcm@*q@LpzMOfW#b0K6R&&Z#lp@EW4_N*W(0sH)grB!a& z)ks%L47kGLL>pq&Js}uuaF5o#rq$O@qwCU@?IIF>4zvsYH&TV>0^gD_PYnAq~Rfx(}WmSCF)qGrPGSDiI zoS{`SNbNCJCMCHWh#@`Q+qbW+;`sm@aa%krD%^^!I_gd{+OTN~vey>$*bb8ttRW3j zo%A?!ibp99Aw32`u)^WRNI@&MT`|u|_2sY?zTu9$rWgP1Zy~eeUFa@Ev~HX@9F+0U z5p@agn!!$-CqK)6>U%1ANvpvFe^?PT?k!rC0|d#pa%C&BO6nMJcd6QHvXD2BNyTT= z*H&pVd0tV?POtjZB~q`jba?pW{w!kz>cI_Aam*GK>`|Jr$VOp6i#%w%fcubUwVSVZi+M4J8A9GIG5e~{S^D@SzRGZROgTe z{GWES$r324Cy{~Mw2*Bfn&|bR-|tD@^T@1lIMw|6LG7~JL4C5CVK!txEVDoC6Hi=( zu{ld28D`;Zpw=T&Zcr7C-=!`UG<88)rK2H%)V!|B`T#fJVZLOhsKLdfh5)~zZE{_WBRk5cF-nKQF2hx z;S-Z-9$SIh|vl5nhNk0kjcjtdrX{K zbVg~Pl;Uh-zEL-%3BDdJ3SXQQ^vjx_e%~o>s?Z0f?e$0g+?^hue6CMc^F)yOi5bJc zyN>0HZol%M13zr_neUuxe|%8<)xPz4_TLucWP{rL$>b13mQ!j{Cz!0~+>I)> zfda zxqFW_6}nH^+lmQWt*lN9{41=~rC)htMTp=OV;uKC*HLsU}@`#{Jg<3ozs$v$Ct7_W>wE5&WvZRh3l2Ff~}2T20!uZTkYUh>MPEWvvtkE`+Up^#dF1Z>1r1?&--C0`SA66 zPEJmE12*6p0YMja9fX1@dtVU8E)4sOu7n6##5T1D!!L=bfAoGy$6Cc4ImZwe?%h&{q2!9e|?E|H%no_&z)bnUv&!4d^6l@4cC!Z*m@wZKDM*Q9bHw6lQ)daQ(M1_0GYOXM0 zfzj?%mO4?@t~jPMy8A@(_-I6sQ9WMl0c{MzY$1fdhp1xDuh^=c#V9vwcFx!9<4$o= z>I_W--p~0PbvY42*#ZAY{Rf4bwYY}nV^c=6f7f`PKiK>UtnI`O_h)&60e_Y6985C) zR#+6@Bd0DWj@)BZZvpaq~l&8<5LN$E-D)~tbb%GV8SU((B2Jw{rY&D5{`|i7(#@@L?;kE}eqRI{`K^$>K z*3t@#S~UoiVXkQ$mgmHs8p0N5DR&ysNWp%4hoM`|y@(-)k3a|t)SgcxKMo?VTZ$8q z2`p)T0`CCX&Q5m1gBpi#4swnvwKr-vucI=GXLpGl;8n^*5&#k&)ZEe0Q42V-<~W$l zA`$Ik7+`27-BbRL0~bUXF+^R~*kE>5Q*LQo_DUL4%8@y=Bs?pPv)9C;5x>jY7v*u% zE#mtoIZc?EX>6}&$SpE%{k1+ZB=1v~;!Mc&b=M|dr(fo^87=8pj}!2tQ%?nr*79vI z#l-35(3ylohtWdih1m^}iPodic&BCU^Ix1cjrsklH=M>t^piJR57mQX(|)EX;*UL( z=5+qE0*|4aw^{hO5cWbP_de5U2Yh6D=K=;rTHidVc^5%YlZBxb6&1=Ve@9J1qAf8{ z-cIOWQt!-lg`f3;VaQZZBq1cWnh|a%FsdfoF-O{lQ#0WgxHJ%qQ1W-Z_$atiwoGr% z8S3>UsVveHZ}j4<;Tsh~u_!8^biI7+#;fK0IKS>h>6QOeDAaRy)_pi(1yvvPgs7ap z;^03!udkXRh$62$`-@C}_c3)jp6w*BZPVu%JyAu(p;1~~N4Rj^DfaQnNDeHHy(*{N zEtuFlYyGmL!uska5oJzET3EN*vpzG}lfOCt8!2GKpM0_IwPB$ZTDcz|emWo)!$9+G zL_zWiM##jeyfst!{MU(7iWDb!TtrwezgP0T;y;K=X|`V$m5yx2ApEnuZn!p}y#`B! zQ=M$r%_LlFFke@^OxrZWBY9|Ft4 zcy}^fK|en~CrH!UBu_0tz6VarVjzZ}@G}?ZDHovlmlaMfFZ1JQ9vpgJdyYI^D|EHs z*%hZtCrd;GW|!*(F_g3?@4DxuM(vDPTWI0WN91=~CEBkNEx%}%2`h6FhcU_C;q0l$ zlPj_GZMOL0qICO~eyC8&Q;9dyoQ>~VNIMkS6hA|xYJ^$LtuUO_j^EV0fT7a_l-g-+ zmue4iS(7l8==@$&(qNp;F;HH3H9PR+(3=*6NfU>E|9V+VW37D+X8@w8L0=pjwdEmO zDqwZ`-_SpRSsD!q)S#dL(4EQqh6NfkLy6R0h>-Q5+zDjH|HMy=g{sq>cfpE@^RA&) zw)=AAoeI33>v5H=H{hwG@MiSDr-Z7i+QxKTT|EWkb^?7E`CblsElRv2WrB^Kon>>m z4ZmtGdE$~RA_#mMCSuP@L36CL$7#U||L%VpJDlFb8^l6B!rEOFyWi%e&raC875w4k z;rQis)Loq>$L}t=rMtHiazuc*u#dO5WAfleW2j0ggKrc|KN~IWNJ{;`eno8Oy)ml! 
zO<0_Ey>fASpnS+rN3szVQc2Uo`cbtjCQ3)WpuWgdVm?j2FT+-iovC~Zal$0U=@JE; zX&1kw8e1YYkKH|zp($TBG3Xi^61xBM^Udv##KDx*R0UUy92SukQ`%GE=Gh&y0PtpF z&=hX${4ooWC$ZvJf8F`z?b}u7Ia((F!Z?+Ufu5fc=QO2ObzA z@|?+tNj)jjy+7~V2Q4)@Pv;{GuhY}3;+GCLzF_~?I$OCWtniN}?{aZhAKVVGQfzLG z85t_S*0m{)#_U6q7LbkV{#+@i0B~&JfD0-FGYeHKVJ16MJ?OsQXIEG*wnZqPx8W;q2zXoQ$5I~qUqUc=Qq*ZRYxzspbT<=O zf4?RbZ7=xU?TL?v%k?JD(8!RvmkZ9bKlBgk^Yz4tXS1`VLjZxlHq@%YO1NCqt2wS09RMFKwe_mH&wLq zMH=5fZXrs3Ny*Sk)7FmxgT@F;OAdTOrtJ#wmBTLv6KEiPNNPNqw$m2?KNPwNH7chF zNw^9ivihiY_q#n53lf7Z4=KMt$a1sW<+nfUsuQ7{nDE~vsv0$sX9!?9mEVa_=io=A zUE|STmM5%yDOIkXGJikoqu_G`GJ_} z&wvYGcjM^qKjQAcDLY)7h}anx3urRn4j$PAjEN?-&}m5}O#Mx0L=0AfNZ*t?8}v!X6CJnVr z&Ju>+Vc_Sunsv^rtbkxh_bzC7R_#sj0va#(ecMYHKashUb0}oPcNt@g96s< z{XKFL?wj;V?|9IOw@%yf8pc?FP>BdO7M7#O`}clk@bN$C1Ls(m++6`VkL42#T~~e# zxUY{@$n3{G8DU=Z%xB>Eb7&{=*~S%(_yCMW^X_l`n16)MU;}$>QOd90G2PQw6gSHHMtXv7C1>J(CMTcbF+K}F zfz}O>Z3zY5Fi>@(g@R+#k93$&V$~#`=~`1N1#`5coE|Ed#ymoln=W?84EKv0T$i|8 z2$npKQ=%a9!TF`6YCH&pD@k*k5luz?OQYd78sgOpDc) z#))$pTS6EQnveM$OR_qA{M_m&=HNdUL-NE4ET`gn5dlv)$vd7~CLj>XWYzQf$|9j^kC3B)XG^s50; zUL+j)W?66pPwZ+=od?cl5Te+SH`e3GnNML=5^7ddPmj+6IeGr-=ljfm*9#}WCZ5Q< zg3$Qs&v!=yMii5c(h{PP7YDz)4;R}$c3s>yg5ss71mgZr5|H5~Ox#0TJCI`@~ z%n^GQ1ipW}H2lT$aKvF~WVpW{->pWo|46qU%H0T51S&Fn_m7=`y^XW+p0@EHgG&D0 zF~2*O!E2v5ZOf>}0odkLp>cHVSzvGCKz}#K^R5)(|JG#DkTP3+$1m~PycCh&MBNX5 zdr=$bA1=iNFz)#5RF)4SG5vE!RdvUf-O5!Ar?V z?awl8{um%E4Ek&pL9WC9X9Rrc#=;E_&VEUb#>Dk(ijwNgs6tc#1l+)GNjAS&1o-y- z^c45~=}+>Zkr#Qt_PDLO)It!?M8~k3YbLB1h%GN#(dg||tqF#)yt?+9E2bulu@?4$ zly_;!J}R4cOG`_g@SyDG={o5uVF(IpO-Mwv z{5VnNA=Gv06!CPNRnw{<>J>DoD~3puu-iZVSq%+PS@4;J?c#&v!c1zL5ZX#)@Ncymd%YtHd_hRS zmE}Q6=prlO+OSX4zp>n9t~O1eLe<3IY3HK(>$e&x^Hf!_RzO z(^X#;yMh2q&+G4|>Ar5s^@ATMd_$Nb;4)r#3E^ma&TEEug9tJL;Z3OidFQ#9;9{rR zd|i^e`9DPLdrBG0wg;OJu8#D6g-*Y_q+8#SGfwSrh=g@N7Dh-Wru<=IrHs?N>BVh* z*%ZOIe!YOh4U|r4(%G(T)r;T%N-P!gV|j;ow`4dgYKgCLri(J-DXlMdnMg(06MUYk zdMs$Is<7nS`!nAkJ0Totp^Et)=WHZ9+*A6bPb|~_RhIv<_|7Pki6f{W^Yp{wqw^BO8D;M$h+U>+D04%gLWf&J}L<41Oxht84~C zbvbVfBa@Gu*b13wSm0P#GI)>3wF+DL3TD(k{*dn=SUwLg7}cidDwwcK=RzU@o}}*U za3l@OrD*)*r3R&R&(LYY6P$Nq@MuOLWaXEFwESGncmf&>e=AwxUZCe_BHSkzA5#qE zZqjrQC@PX_f(lv>FuJvM=Pb&3p3g~XmVAxCgOCNc0>$fj>Q3@u+(;BqM3HD4FW>J=TxPgZ z9!c_((Xg1;Gid5*E_qbEz{{yGuy@&antFl4`?>QRr}Nsol7zzo6|}or5qRxrks}av z$kcV1E)g5+xcnoE`slD!k9XZ5H3%2dKTU;~TQ8Ytg@~o8Pz#M}yZQ}Od-rd)qe2kB zQyfZS{y^`}o@7e25cOh6s$9~4-59uMe}3JJFNiOvsiHxIkAG{}Y?8TLGS4`d3#{aS z8y_EUefjdG8Kvy+i(w!OlS8V^w1)QF#v4Xu>7M&j@qDk#Rq+A|2+`|JkvxOxxy_nW z&0|II!!~}8h{=TYnni9b+l+nf*TDU^fm!FB ziNv{-KvvDXsMQ}!j@k84&V(w#{ac+;a(5s6M}kTlNZuOlZ1^{u_`mn{y`^cWI>&C{2dF53 z>}IFs4n|ECGooJpp+V^F93abz`@#mhUsuj+bKCUUO<$Aase)#LB&QGtit6!Oiz1tM zRJN71Rrp<+0m2(iF}#1EFL1wakf+mqEPR-uEa!Z9 zRoH-Q)_f#o%>FELRA_S2Vt(XAc$aQoIzOSK+UMdx6cB2xImc#LtHjt+4nZ&#m~8Lw z^MHwWPD4XOZDe7q2HFON8Bucc_}K8G>xjxJ^KMpL)~?8bi&y?G)Y9$Fw6fR*DOB%^ z6$^<~&_O*AQUdv=8f#7n30b4C5T zgD&!uHumekOO(6ATjKm&Fe7jJ2DF|kq;e#F-pqAvj7sY6j|{(p@Y(FL#VJFL+W{ZR zy@*q1!M$vR1c(Tj?(XhZ1_vrpkpQo;xZf(C83Hh9H_b{*NddNQS()-z2>0!=F>nNl z-y~o=eKAnegyU`>7Mr{0eE9MAD~a|RJdM8kRe#;iwi^i!Et4&xq(MYD0)Yy-{OwF1 zPK)8tNkH{;w7RV>;_8>7@7As2SLN`ud!Srtb*pTRFrv5hlIhHSC0;Xlcj zinu)XRXZtjmZz7n9E%`D8I2XTT&?iK9=bxsL#;3*$14W1LZ?MYR@T6QN^<6?#_;D{ zz5&x*pj)=~1D;^y<5v@>3$BaFPC@}iEZ23-1uqgcQz2m=J|s2VVKUG=tsJI53kl40 zgvmfckwD(Z`!5jVDf&xGSQgjnh2+Ehz!X^hnu+^f9yNaawzj7?3r?VK0rU<^Y8f!foBWqbw{_x=Tc8=s$1X(2d?1SYWlRDUqiymGc3JCHy&yg>_=GLQM8dp&Q~zZh&l_kR>^$atO_2TEazK+0H;M_K zGI4L)^IY{|p{R4#lC1EqAi?SE;v%5bsC*>XZ)`nD%{UVRBw@yZy6Xlw4u{Li%G!YJ zonTa5-Ni$XB}R!w%S$=fnJsnw!IQ666#RmXMX7+jNoV!r=_1Lsbkdpb*I|$^fxNxX 
zKXiI{5^=ma_YPSxFg5Dw>7iR{_oX-UK9u|Y*ez#F0-arIJWN`|&de0j<$Eyizco^% z|6K`mznwk5(JTGJi%J%lbCj_em2+cQt%^~YW+bBR!f?b=(9)zpV`Ba`HtLS$GaEpD z+U815c$zDnZp~Xr0`t7sQ4^9QKKtuRnCoL0)zDZM5Q@RGal|4nC1;yW#aOZKI!f2S zRLwNqPD`cv zD4%a2Ms{y+@2ulen;}9rJx-eBEu0Aj5g<2$VEJq| z@gD!Mf%lC8d$1*^68&g}FE;uQT^2QUS2w+!ZC%{iv8aHwXr%Q0FYL?f=~l#-ElN)HgrCc5D%rRWQS`2NfswFC~WSHXM&fFYl<#9Y~8es>}iZ{Jq6n*KLx8jPln{SC@v;TKy|XW?z-9+){4 z_nNE!7$`K?|l=E#*S z0`_p<_klD(i8F&e#NKORIiJS~nj#M;2Gfj<80hG@&(F{OUEjSshHc#*KIvDY3UgrT z;3z0Dj_Vo`tW4;@!rn*d$jAmBzZT3!1_rN7APf46J*4~VC0x_!8@Z{Jm}Oogp3;O` z03_~l#KZ`T2#tMvv?gvhW{)18Y9wBhzX$Z;YR}6g9{;7ETz2APRLV@p&vEwRSG2Mi z@qSpk7|{_eb!1Hu5rIlMdl<#ocVg-&S~8X>Ic))`Y_>NX{-GzB;K(^zVS*oGLEy_- z01Z4~fQ770wR8I3ZZ59`JoBb%4Wr@EZy_ebwf#Jk(R+DvG7Hw$HRR9DatUC13B#k! zn9w&A&U*|8-KrY&J*)r1yq$E_u6sD5Ge4p4adqVHLMf#V>;gub6SO-FD`(9Jg3NI` zff#8~hc{TL+k(bMeL89XbbLZWUUfCKMVoq~tR(H3yXK`b!MMIrKy^q8GK9?)8ch|y zy<42yXw<*wOFHQq$$x65X0Obgqs6WuFP2E~xS-GZ!&;~GI|NDBaZOsr#Q-Jmnw@kz zI#v6*&0U3~{4xYxFOFkwxjAjf;_X=9G`=(M!hR+YqNx5%7@v|DkAFmlK7@r}n`)mo z`(_(LBTnjRjm);P4<^_mf83;~GKNGU$oN}H)`q@u6$M#lx8`nLTW<4G{5vQpH86NU4{ zX;*KAko{ex`}pBzsof{wdfu*5IF|0|nzmHuq}5V+9QxEKe%x%<@3DCeC-h9GVxCPYqQgKrs9evvo#^0_8Q?~oU^MOfd*?>%GZ zEiN;u-=Z=FMcO|Uv5>fX3mPD-v)0joQCyDVWOqyz%lx?FOY|pVeyW_tVAm!QQL#oD z$(>p=d?@VjWozi)AIf{wdzjflK@^k}F8@aSgd2L`0do_X_vDG}ZU90baQ3`FfaeEs zen2OSO6aZ34sPMdrb0UgJ@S6n|YWfZ1D1CfG|iUFjBu_y)@uGO)`ZM$Mm%phd$~I zTgJY#>FbJ>x{Sm-^@WSLiR#sHw#%t8SK=$YB64%5Rg!Zb@9T65&5)TRb;+KL>dPMd zKjY)x+USseX>(>)u#kO^r2zfiBpq2xDJ7eyE$g9=Q~!=Q;I4bXT-IowuQ#dT!i)9R z0uKSr5*G^^mGpyK_WHryvNcfT@FMD9wc7Dfoc%!aiLtvl<5iEn`!~SDAt+Y{G=XjB zzePK*h7)xP)g5dX?pP;!p}?+}W+}rdh<_7v-%Hs$Jfs9!Gs&PSo(M9BEkEBGjk)cc zVxBJIiW**ZdJ)KBuUpRb@)4+s1f$kA7dhWZsoJhlQH;4>i#H@%LnFNQ@SJ>P8O3-M? zTcO9|J*-DNKZrzip%nLVa%5eb$6VQ+Q~r)|7FhFEt`t&ADFilFwT{Q0fbRL`e9VAv ze@}>vDe$2HxPuEFmAr#vd6&RXsbXGfq7eM=9p#BLD~`T|?rOeSK5^<#a30YBtucHc z3~;xy<9cU;tFo=ebmu`csGA)Ju=7Kve)m53{q)`at=GRH!O<#EE@>_(C{Rs5c?;k- zNkUu#W&D$~s}ITH2xI-L*#zuX&Fncr5%jx2i+A3Hk%P87oEx06xeim@UC^8B)_?IQ zjH~;2pbtlwtfi!cYb^Q^f1t>pE_dhjr*7{pUU|!; z5-Kc&x5P^S*)B~wQe{7171W?tC@|3N8KwFtBNHH~8ITp%(sMZGPfBrM{lITu4*~Np zdS6vvN`OI8d$XPQXW#Tqy&gqgR~8Zk}+?s_N}RthT*c7nL#IC z0JME6Y>da*D07t(loWQxgf zqi}OH5s&S1`)QFw^Q%n&ni(%)h4icdsiKB2B&2h)8Dp}sufMBT8p@A<$lrh82wq>t zbm;*9E868ttj>2EDw*!Bru>)k6TwilI)i%J-JgTjjj2*ONSx@Br(8@_l=;QM;tTk{ zPYLfZyL5n`r9POolA+MEk?M0j1yVqNOdg^&X4zQ(7uA zK|mCZ@v;~}VnYnNYs|A8o$hCcyZ!G5D07yxvnIW!O@-0u($O%c0>lW8oS3wSP`RYW z#G}GZFr19iBHhlT6G-hj{B@sxVgs1r_a04qEP1`%h%~!><+(XT-RYOsKru^kQn#9a z`k^Xl7z?fL$WdNlwi)0(GFm%e(&_(FgoZ%7Sl56mj4~L>0bC{|)Qm|{nAqVb6Z8e5 zF>!J+R3qL%Nq+ABp8l`zzFJ1N1wY7pm$-NhO zW-0b*-Tcj2!NEKWh0v*z3d4KM7{aNe)b*Pquy~LM5kEbLw*HT9BSIMerJ4P@X$g@2 z)3Bf+ahH~U@&LjWZxaWT`9#h9&ZcKl1>Z*L&pFw7k?HEus;d&=!$t4_5$Kt?1)%Kp z+SV4A(f=}MuGNE&hYUAJz&%hvk0s<8A-Y+XxBiPFj*z|of>AIUhx03$NKG7#D~d#k zx!SUbk|p>(0w9fdU74|C`1KE=X?al2L%91~RF^U7r(Ji0dA4w+1(8(~ujkaa!q8>^ zqp1u=Gq|v?o^v?nk+nl-Y<8$=_+Ju9t%y_}<#C_9fA8r!Q=fmw^~L+#=;rBEG;qMR z#bD!tR&==oHP?I6@c#cDci*a{gzgLJM>B(F*+cWU5SINOzWC5(g{ykJ2NkNY0C0y7 z7L+Y+heQuYM9B}=l@Ie`cj7i4b`G=3Hj{xMG1|OcltO=obJ5lq#8f(i^WIZ&aTSgN z)r1MWZ?9b95XT(AS{S6)Yv11lT}&E?wt%)|GQ1^}oI(?SR4ItSXbm4T<^fwDLjV5y zN@BlQKuM5Ke12l?khWm!-EP3Id6qDp&3-UO*!z9_?cTe*oHsY~hWG_$&(G8}V{{a8 z3yKTQ#_aa##V5Z~TWs=t9}_!GI3gv0v)VrgUJYT6?3-s&#xkPJ%7f4(WJPI>)d(28 zDDCGLtrBUtttp+`pEoxzI?_+R|9y)>)?7UY!Ibf#p`j1!d%tRDmp#{`HyH1a!kG(( zH?&j?_n&H{nmOrAC}4DXJF?+D*Ae#Xm)+Xh5)cpw?CzGCu@~6_rO{CB;wfYZ9Tq-@ zGt1RHU#*|nuapUEI?28%dAgF(ATBa^#I?U?US(K&9>(@#ENc-;fzGNu%>c>!B@Afs z%>s+VArvwuRU12nVUwTnT@DhwP8#=B%8B&0!IhTBX>);87N-i7TnuFD-P>9n`v2Gw 
z@YN543p&LyKgsPF`7J_D@}=kbcQI3OFDf4eEG>Q-v)FAN(uN}HFH{BY_Ro_xJwm1N z<&?^=-e)~{d@U^O1jlizYN1A!>D6H8t?H+VCGkadD6X{MD`b@x5EuH|hKN$e zz2ZWAe0=^d``l^UagYw0YFqD}W5P>BDoY5hA`X}k+kn>$Ll7yvJ#evzJC!P4z4_Ab zRX&Q+{WR?yW@w*Q?x-Je$Oi; zT`WW$8E+Kj0SkLSpLF7L{wbY^zc!5_6rPgqW;tu$|A1bB4YQ z-#DmLyj=8csi~0#2R;G@X?NeB*98%bUiC)hg8hWb#b4*D0s|xJ{3jA-`(c{@vFtms zWeVN&+kA_FhI-fI-MACJ($Aw@EC5sod4@zO^Waj&)~yL+cN;AlOMSS`dDFsojZ@NY z*6pc=ifSb?`BC;Bw8dK^HF;ZqCWYvqMRm!?cDPDjYdv&apQ8zmt69#OfQ-{tY3y?J&Xw=@*KOI)Xx!27p2?XWNjg-~k0Wg9Frep8X%c%l(Lff4yE0BMAJw z{|gc(10KNh@Nl#OLysq+z1Al zWhlj^QuWXJc;hui5!9uwF@KCpvfy)9b z3y{xXX1DI2%~|YNB&!l)Vr`Zw&w<(WwN%Gd&DV2-SE|jODA`e6hUV0q4E8e_IecoA zP_o*VuwI=lspkS09W5<_s zesjnNGGi+{K^oG-#RVAqQrhUaNHSY#wsmrHlBU=t_Au1&CBOF4%Vy!|cs23r#>I%u z^yGE&o3Hgy)QpRb5W6~hk(Y93|3>x@`>t>LU*q~Y4)BzpzpH;>z%WNMo3HJt=jdcq z`XSca$0vXb9+U=w$R_H&g4AZkvk6_7>E48}5F?c6gLy+jSXEY<-G^{EyD-BO`k;Yv zDsfF@P2dUQU2F;;Y%FwnYiqjLaLilsL#Qaza@2ob<&d=N#XG3V``NF82i;LjrlE=I z3km2nK@^8#A#4c6jM^Ucwxb?5^_X1bD1Vv@}8y#k&#C3`48IUmXS>^}coQ$ezXawxxYC@4<)19X*Et!f4 z3H6msKbG`7GC$i$HoIp8!ORcZ+!WaJQuLd_08{ozgG5&ZqUIw(5#*b^o$sRBqiKD2 z0)A&sge{+RGIV%@+@!^YQQv8K;A~;CNm7^yhv#&@G234!7&<-AIF`NgOmC-8yl(`4 z=>H6}gewkE7*q2SN=!kAj31Xot8!N7b$(ooHr->`AIQuI-sUB@_m!F^_OT68eKpT!RlypHDpyP5I0Mq@>qlPy6%{}oY7dXa@Me1k0nth z^uNc6D<@eGw_AjCVn>}|#B6(l_C!CH0Sqcd`yCG^YnwCcW+W@_RT8jV1Lb8xR!!i# z?2nK+E~R!4|1kXBAsYGX8~LI(OIY^7cWOe_?C!2QVkdQZQRVgjCU4lD5Syg3_VB}U zN?oS7-bCR~2|Upqs8HbRcR$Uc;@{XE6UyrGjZXCps!WM|1APjUF8?w*eJu-buB&tV zyB;fjFWq@sz~|Ta3rr-uI6O&BrH*+Woz;(ZjC{;x^Xl+SbYwH*C$mcK_6cCqe=I?b|n{~G*Z7)Q_M~NHSB7mKe z`8E@i7*2&eq7OmAU?f0YKo=B}?F*YREHVrfGS9B7w7?>f#XfnC&K|<~@U7s@?<|K` z-W4C3L*LX3C6AjpTDUZa7Hp80)xnH%p2)%Sr#nK?xdw1e-Y0H}kwpo3AWZUqFMY1< z^29Oy(%jNgpk&`e3_9v&T{pfWZ|A0B_@|HM=Y2R~OD_K?_NRya{mPI%%<*LJ54AUR`Mr{sG4 z_55G$z<{-#uN%!ljO=4a=O@;eLV=??1LU`T-l4LAdXLFlJ3(3H0Hl&HU$v57_W=ac68i9BfPi7_vgpbRoBEZ7N9srQ82tFX-@wtC<#n^) z_;8ZkdAs3ZRGtJw^W%G!PXAQ>cjsO4R=Svb*E9c?LAzmfqn9gkVbs`a(h7>^yLI$W zOByAyGK<{8!5n!UIoLQ`AcKY?!KF%eeb#?}77XvjCcF&>tq|sQox4akHE2oe?pj6& zfRuqx8duFblT&Rs-!FtLdJXir#S%;M6#wlMjY2I)5xiITEDvk>va7{3B0Fl3&nO;< zi5bq{HI($)FgVJ}_&9WF`n===@7dF}CnHU&`j-%aao{=El_8_E9s;*_O2wW`>RnG+e1p9c0lAUGd+fp|=s z&g;_7>%WQ&UVjeXdw3k0M-eYPr|N}q<3(6t+@n}dE+fu2TYE}Ee%d$c_z|DXuS<<* zus6$gi1R%mFhRvVd4&n+j>WmQCG@*+>hm1<`nhCniUf^5O$G@*Gou0bC(;ii9vvr} z#!l_;DbZ6joumYv48YOsK@ahBa++m~HcO?RYUiDv9}8p@ReHdj(tei6F_8h!iXmX$ z;5TN@Ip7Wj5(H+FE@(Ud(Z>2+PyR1)*?#XesE|3h;=1F)(20(W_%A-2Cgq&={m_?$ z{ivMvz|vMJJC9Mdnql=(o2cXc)lY@ge<*Vx{Q^%El zz^P)uX`^#Uq-%#B;C$lymg!=r0D?A)3GsFr`a@~~$*{g`su4p*U+aXZc%>!o~E?UudKx)+nq^ zcXct6QcJme7a&o=P4vaN@+l7RxgkSztRW_Vn$v~_0{VXraVgUG#kyZi%qbg569Q`^ z@sBR)ui!vjo?`6z!_UgwpM^0DlGnH3ztxR|<5r_k92}8UDC6HKb>w#_rgUm-`X5yX zGZ7_eyDu-ZcrGKVFXCa+_`NfIeSP6?M>FrncPl&ZCqc}jI`hkT_qDOe$xRoi1StCI&z)2 zgPp#o;Dbo^@USpKm<*z6emUxKlqH5kPZk6wzhBphbn28;u4{d>rrmb6B0;?01j*(R z=`Qoj(b$>YGFo{_ip1}A20@loxFMp-e4N(Vglgqf=}|ek5nUluG(_I-;$;& z$f+AON*V{l>Ss!UWARF$Df&~S?%RshR^GRv-Hvr;l?kQvkDfN#cX1C64n{VD7M4p8 zb=vL&ZhT#(uYPE-4Uxa*wgKXvPcp)Q%gzjJ;Pd(F-hcy(q7>R!Hd8k#|87{E065m% z=k9(_xk*2$y~7r$__>(a!!M#Z-hI>)-1%H*2Haoyfxvs|7T2wj;Q#64gmC-cfIv*o zG;&1{%2`0e95p>3P~PLD{duO*j%iOgpU1s{AxG+TRp;UA>gq<+@%QCXhR4#=n>@z5 z?FrC1d%$8y<;w&;wwV_S)W84^GQta|)G*NMDd>RF2t)RbE6(aN9YAUF-YTOZL$L`w ziK<m!I9--yCX~w7NSm{OSR( z%b%RvXJ5J1zSjcGL>S+pkk|Gsmb6cgMG1i08lmfSR(oYKX*`%XW<=BSPYmcL!}`IG z&cpw*=O!n0=olF*rEm6XeHa36!Hue=r4RTO{@E9bg#b7-(T7}0*n#S)s3gnLvI71< zrTxsi|70X%`m-`KGn-u2`wPJHn-Q>E-pFg#{O*lAxcSEX(Zi6;o_?zyDowPZL2^`A z!%fQcw`Bs=)#=ff@D03<_g|rCL)OprZ_N<*-Na{KmC@02g`Ejv+?gyk~ 
[GIT binary patch payload omitted: base85-encoded literal blobs (second blob: literal 122368 bytes); the binary content is not reproducible as readable text]
zBVhjb&jf^iJWA8a<_9t?OBOKaMZSOvNwI(x+S>wp9XC2AwBvJk0n1#X7%#|XXjUK~ zi8&#l*OyNO^xtG7tWRE>d<2w#IZD9%m>B~4TWk?fwc$MDt)2^5Hplq{El*y~3s`2U z6fnna4#N&R1+1{Y%;b-11yrqTdQ#9wV4#5hXT}Iv;5S!5uVp(0lwP|epu9n?fU?Fe zr8HeWyaJeF$@6#tD~`+(kYsNa5XPT?1qIats%#ny>v2(P4*_#_jS!IBohqRG)p`L{ zZ;lIC^r%um?Lmvvw49vJJq1*>8z^AGp$q|4+)@Fxg~bBO-d`8ci+jf~+4X0d-hXO< zfU2B$0ktV#2v`*SE#oGq1T6XfHvzo{TArclzSz|F5Cg*ATvafvwEC?JfV9vEk0;)!>5HSD3J^{6z zuL)Q%tyVyJg+-ZA{)8q1dKq>QP&&J(fc{2(1+2&#Dxmg$l7Om^ECF*0W(g>*UMirM z?LiU*gN-15GFB^{~-lmbyKw0EU0Sjht7O-O9 z4-D@e7qH0ivVd~edjghp)(R-&-wUV;wiK?viUOSltmxq;ptOBA0qb3#LA?dHK(?#J z0rOpcmm7L^Nw)gVZ^*^ZD;q93=a-xs=NtU&lHU-n+s5C9)%aQbbhlSh&@n&jjVBam zXTS1W>**>>k9+5L_Tcs6xdR^i*?R^4S)NC@?b#hR*&3R1$1XiBaad-=8H8QlKC$n1 zzeZl&wfuxe+|d`teSV3%>t}Z$!u5>AnEUD17XvGv{pR=kGv{U&lgzl21|_Q5-<0?b zJPA*bc}lrPX=k63;r86ly$w|DX5R5j{ifUQeM6+&fZvS!miDvdwjWrx=6X z_k261Dfj2wmp&D{Uijr{RVSh!Kl9stx~G|$+K~HpWZOpzR86@@`^HaPbH|la7@L3l zQ(IGx)uS;teAHO!i|)o;zqrX&wqtF$o^d(nc5!Ch?Zx(wFL>E;oeCC6rQ@1%0oD60 zR?TY6bvUqTl*IIdUrOSMvwq*V0q{VYD zSaP3lp4?Ws-GSRYdhPLuJZo-RnX%%SwF6gc+NqbPSx2tt-46GU+c|J624uYZ_Qy8d z^cfX<7u3A-TVfG$a%IPsoULVcXl2GLzqFVWrU%Y6(U@cuG`PUEPhOo zaT_FUXIOfhb62w$>>V`UnOm`X)T-tlkNw8Jo7(VUJkQz154_fBr6*_EHO?u&Wjii+ zRF+R(dS`Curmf*)hx>9a6XxF>+d}L2hjY(Pzx3_JIn2wLT$y0cIgXiA^<+U8Zp?&< z&(|#Suthm``x!7!z{5M+j6yE{;{VuY0VW}tUfpQ0<%cp(2-kXUUG5$3}5cT@FUel8)e+rFNU{JT=nC+Ygc|@^_?kq_vie*E%P|;#nNWa ztZln-E3DR!nC0rpZQR!*OufXNyBEVvT4LnGc{f=5TbrtmoY~;{L!U?W;6jfLu1!DO zjjM^RwrcTnD{g(1-xb;2uH4}2*|Ae2dvaE*E`NAC*oV9Ri`z6$OE0eCPVnL~+bX|J zLw{=&FKx|P|I*ni%d-u4zEk2b>i|Stm_cbt2>wL_qbwXWqWRC^Tyf}MeVsU z!*6NkHuK^ZNB{UFT?TIwIx|2$caPSu-Mv@~*XcaB^MJ{*WzMyJTYmW2CB=>7zA|qS zW)bhkjc{Lo*xurUUx=B@qu$Y7xi3TaLr2b5ny^1D zuLtM+<<7FCs5gG&+s$i|{AE|DM`{b@;`e^b2ygs(Oc%~^t&PDW$8I=}x^aCKrZYB~ z@Lcog#Seb+?!{Sn1vJur=flM}xjG{-i08U#dW zebvS)Kkr`ddD_4bZd9Ka->!Vxo_nC^JAG=uHk@a6(7nKh?KsH*TT@xvaBledtBpOg z`f{OTu4j*s2XoEOM*Mbpb2xY4%H%k|DeXAhEeBqW_)f;%ns7c|HoOxT+IGvchtu0} ziywceNeJgTt6`Os_a6%3iZ}Itm%PfC+qz-&^o?UXa2Nit%07A~h^rXje=NMD9k;qX zb5g$yf6k_H#WBO+CfwrCb4QfN{kgMUuLanh?85c;9XDvv2DM+%&431GuVq}#wfknO z1)f~*IfipzH0j3;-u(JbppXRS}3PoZ&|fd8Nz9vgzkK2 z-H|(aVZ+QOhr&7Kgu?g3i&}F{`#PH+ITygKUFmN=`fLEFlD8=x+^sjKXyMki{PXVI z#eM!>2OL^))qiGx_UC+m?(vYDJEX`M}$}mTYlZ_kowy=DODAe&F}sDA*>A|G^3>`p8s94e^Snl_k7|H+O3@yzULpvmu+;`yyrE)N>240 z@}AFdzL~wS=X<`H-~cNm@c*8lGqd*o)nDK7r<)esl$?IY-^krJL9yc1Bj&FK70RG?c&cok+D?!werSmzKQC%v+DS3{@w4hIyT+;nm-gjR`KWZ*Zk1CUIQOa zd(Edhmu-@yzUKSRNiqo-@|t(v5CH$L`2v^Yv)a4A<_-EydUdHWgcnHQ|22QDi*(8S zC$D&qq;U>we|g0ltb5S^!jV_}b?KnrTJ3nnGyB*p{zJ+9h8@3n#jA@gYaggy@jE8o zxA<|yE573K)we~VulR@VE2p0De#N)DR~Gh0_KLrfFR>ZW@D=~san*vuuWEU|d)1`K zhqe4AN3VI3OSOE@4{#y%Q!T&c**L3x+iLk$vn4M|zOLo_7H!r3INc@_n#_xXH@f&Z#rPu zz}EL(@-;?N0!Ej;Hy}bK^k1fq@v+2SM-XZ01)B@^$dhs#C*{82of9S9){cL{ziKgX?336R{5O4lzZektf=_5}F|;)B z1%JC_PVwL_FL?hr$;8(!U+{}}bV}N7_kuTW9HYoJdcnW-Xy={q;yM4wzN~ooqv!m_ zdryugmOtl*NWbckcj`I6@O7UP2lhYbONL3t8Et#cZ#mPlDt7gA{*RbkuZ#1a^9rLF z$>5yle9v+6UVn^#&fkliynk8zbDr5lpY!jh{rIqTpXdDVRyCn!J)ZOZj0y*TX#bqI zuf6x1Nz>>2@nhC^TUY^?OW^-GzcTz*+=6E{d=I0J-&Xuu!-u=yJQP%3!>8?RJ*)V1 z4S#pRrd!;>8h-dskp)+G*6@8L6ShxXU&AMs?YiG%Ne!P9DrsSqSHpj1x1-P9Nj3Z! 
z#Zd2asWtqH^UqKItf=8nPdIt+T2u}H=31PxHl&7+c)HcAZOd zI7@qlz4>h^{Y+mv+zPA0^(dR}!;}2(bc;gcJj1RG`*p?mgjNf%&$nXa}pYgAo zj{Rdq$7lRc(hhy^w|vHb|NG;ZiH^_s{0m+Zr-sjXg~74*rIKfSQpa@%XS}H9`(1dy zuK(}V{IZ;WC9Nx}`2x9z#QX}Ndv3&=Q`Nk6_hIv29jfO2J4m)0?y2UD4nJGuvbmaH zWAmV0z?y1)qhaxI^;gxrZBd?0QC>A4ac-T@>nYWI*S5LE@{DS}t>o(D-AUDa)uJ3# zi&545u89jjUmaD=FKQJA|JD3(g~PHRxN82+ffZSUy{q~5HU%vt?W_5$H_dZ@aINMa z4hqf5wy)-|l_XmBv8d*4WD@wV=2fnTrx?Dh;=_xl_NsYO#cPUr-#_nF@dx{QCO$8( z;+y8K-)?-qiXU3x^0vkCD!$zxA^v@Ttm5zQuTGr&T@}CdGw0ZS-&XN!tSg)(YpVEF z>%Yv2T3p57=#W3XXigPxN&crG{J0ozmaat8G>HJncFQJNW*fw)? zv*A_zsVnck**CC?_dRRgXmn^5->Q%IyheN#Z(Z6I{;T+HC4bDG>t4lo@f!Ikrezi1 zCiGh;FK3Vou8Pl*N)Ej-0j_;7)xOvA>s#-B{l{}HpTFwixaUu_ytQK5AmayGerd>p zLfH*1-*ZjZ!NC``yzlh`1IC}y^5X;M&iuAS%a^Wh{p8^fT7K^nPj$zgTK-#Ul6meX zEw7lqbJLC0T0YuL(t}^3Monl_&S{I}wEYd_QSCtmh3Z)wT4TK;jniEk6!w7jXIL9V|{%XgnNuv;rfh%Y!Tp94QcY-p;bb_{SL!t_(@_s3bQIl@J-A7}g5j*ir^zQfL{EiltU8um3`o>4?DvlSqx!zbzxm-SRsVz^|kOcJ$>i- z_pl+2Y1vmo%Ope&o5G$fdm_IsAzJvduV);-(?csE6`)*C6-IyoRY1#19Fv@o(TY7p zO$+=7Cvk-Sjf3Yv8c9ea5nB-A1YeA^k$fewP?;+pG=}j94+=_5L5G2W=7z-FyCE@G z8>=Mn1xHi0p{jwxz)NBvAqHNoKnZ}El`*kOFe6spro<}JLgfbe((r9CppH0!!5HQN zMKePQF$9??kB2W2Kcybz6oq`{fbeC=hTi6+q1sr^j?GBJ&TwpRMC`ry6MIQ9F{bfs zjERlLl-PKi5Sz{x5W>opxL6XG1SxTewjwUxjfjidQQNq}uFR%Hs-pFxPGVv}Oe8CX zdeSh#JDC$FjTv!DFeOgWCdA3xm^gK|uaKA-5VKtda!FH5(o|zXnkJZ&rqO1kDYUm~ zXD6*gg?)*H>HyMMK^m*h7AlDi)GOK?>ShLYYgA#WvQS7I&52_|L*f{1NgTZ`h-0Lk z)}{jLBDhtgQ@n^%VO;4PRiM4Bps{X~;=N zbt{GXTM?UG7IK<5o!B~E(PQ}1n60-Zv5mCUnpc>rOcc;ZdU><di^zHHhU@@c%tN^WLsK#V&Zc!~A$g9l;Pfo~4=ss#L0-6ari4*pB5yS1G$ zv4inw2mNCgDbZp2Wt0*r+Y!5(^2ji2HQj7X}e>1H3M8z4nQ9Y&e(@ z2k>GX2_5HwINkxpcrS(%@*X_ZAw?&9XltZ_7u79YQzUj!Us`YIGdt)ryUuiM7|?Oy zC2_SRt{Q9N3gg-p#=r@U!aRbx1pVg%edW@*ZXQ_+^~es^L|YhpHl*oATob8&L0|TuFMH6J{UXpW)kkL|FKe3L0}EJh z0b$HL!CZBUwAa#pHh}VG=M&<(kdRH-j~;LwsK>Eiqpe~6l#<438?CgWQ3*ZQ z&^5(}mPyC2+Q7@Hg9&lg)DUOyXT({eCG>1b{C2L`x!nxZU<|>@K#JQ&2OvrcO ze-h`C6Pyp}xL4b#tQFEvj_Jee;ay`J^zty}9=te;^1BtSl}t~kNE;-rRoM9+=T z20TZUu~wMFIiJo^a~MZvFoxhb62_3QR>4}SzfKBke%-oh{nvED*rIE1@g8`e@Ls(& zqLBe<)Y;ri;%E%#Dif#&U}rmplSjj@t{OYi1l9uw7}vI48tpRInpPN>8J39s3~D57 zEoA3!QFkv7!JPaF)}p^zr$!xy??Qn$Z%1nc{bV8`CV`Nb)PzW$!sqwo8^4tSn9KTMj;MypmAFOA6;W{srSg2UPd%+}gax*2)5A0!F z*b!%FqjR-XIL|{ASQ|m#8o1s{FogNr0CaC4teLFL`r~AMDO~G;4;I_P*5pr)kGl}B z*C|3qh;3+M4*InuP2hT{NoNPGeT8kAb%~X33^pr+ccOuxAm;604t?Q39Ck^yaAqho zFJadmPHyHfwjF63>V*qq9NOdLVMdxn=NmNfo^Q}ZGRJ_ORhTaP?LgiM=6Uo|16S|G z2CkC%26Z!u#Zf>U;TkW&8s>?VSY5PG!T1ACb6T=zpejG!G-NZS^C0;bjHPPTB31#1n}p+D4%T{oNKy$amF z#C`gDj^1Mgfd5y=-)C?ww+*~*{FzCJ8RRX2ub!AE83=V?x`t~eXp0@JXLf4PwXp74 z!B~Oo_gyen+FA^9in5bf|78@gHe^8?%!G3+tch@c;RN>$aPL9S6D8uB65Cip#)6** zbAh6Sj*CTxUQTVyh+`7;v7{L>tDqys$jea9@`b6x&H*-X4P()OSR@${lOqsDuTc#U zrvWxGB~9R-p$XhGG=cL#lg@VXdiQmD=LMJ`#0)&G2Sqcz`xmjUXCa)(B&3ASTR@>a z8z=|rLmfpStOfkGKin6Kt_>V3z$QTNpGEim5jgg$<6=0)9Vb|LXUro6aTuZoF%NMA z;t|BFh`%8k)3U(H5W69UBaT6wjF^wO9`P9BMZ|lE)rjUtS^g~$dm_dmrXfy7oP)R+ zaXsQ5#1n{D5$_?w_LI<#M#mVoLhOtfiZ~201#tr60o3bEHHd=p z;Dg$1^xcpKQAz0&hrp|u)8UKjgQ9vzcIoVykdgwU*Z^J$9h9Dyk)EOyc-1T^qmseNYE5Qr%AmA~PzE8JjRvJ9 z#ik{sC=+^T#luTwAv(MWTSH$gn`{)N)cmd3QK(O*TCGeI-&9Rz8Hq2eR^rR63HiK1 zSO$`Gb%>I_Hy4D;3`j--3%5X@+4(ccyfj&6q1mMP@`aFTxMdT zQZ3fxypV?`7L=n3OB<6ugiN3z5$Oq;DauemLGZv=zuuAkdxwiXa-QmwDH7fcKAOf4 zkIm5ZR;$xNd!AH#dc2baxd_AR;qGlN^|>fft_-Nls7(CB>@AOC#|xI$b9xqb0qwl67M%vo1eYoKS;b zBv4CuNjnT4s972mQT|cL5VD(w1O&kut0T=LSe-mU8J(?C4k3dn56MhPk;iJ1u!f-x zV34Q;g^*z3m??32{dd7b>%VOfUiWr);QfW%=-7kI$qW*dlAeKMhg>om5}T|ENmoZD zrzNH+2gHqq{wKX?3fjn^^n|(@bBhGQ(AJ0nptV3T=Ku}7h(0b;qs$OmKS2@#vsfsH zd?p-+B*S|yslIz>LAtp@Sbta+Xixx|4kI6WJvl)rFE&w0meQOD^p2p~IU;CIPKQ6~ 
zF4Qb-Lb5tNEfuCJdxd=lv~sI3N_u}nB&8!>PcS%Eog5oS%MGEDD2;BMT&20QELlg8 z-82N3395fWY-mUjTVH~c)u66)bvBe=APGsy%t)dQ3d|fcMyZC8`8(@#VQr)>d8fx= z3M=8g#X?U;!8!o5SU8SUvVN7(y3yR^>B)40!c->aY#fWLEh)twq0xqrA%>B$$r+$w zTrc(4@Bcae*EBFfo~=nrPaB~UU?P3fv&h3e{v%~Uv1zijbd4-gsgcFA zF(sRrt{w-IQI;SMF8bX$9B?uiaWc#TLg9Y$|FphiV92Dw+Lyt;QAZaW8NAz?u2=f` zcV2(Tz~W$7RPQ`v5pKfPZpru$FJ z#d?USy7uY%$|6~KK@?)2PsbDU`8zDR$kKVCUV{EE;=e7NJ!$Z%PgaENZ{5GG3jfiJ z{{BVStXhBnqUQf-wg1}-|I~@ADi13nBjDZ?1nMW8%Mj=Due^{)l`3#rOdj|9gmke*J^> z&g9qimFwx%O?{`!|MPW1ih;1-I`I0a>oPL>`q=B8>qiEbd#U_(#Vhze|K}{F>24jdN%-Um;gk911uY{AJjupcr8fcC>@=oD*O?_+Gmu zBmwXP__q+BfDJztykQZP1$Y;{6ppEFU*f>x8v>35??U4ME(Gre$BO|6II^^L0IrUfAzAT71Ew`@ho0XS|2I2b(5Ck8MJkMjY&;8&%}A&x&F{e`ItI937X zfv<$)e87d^pTKbe;5zVHI4%S{37+Oz3TOg1boAI1ust5T1FFGOy}>p)*^0;805$EP zPoe%4OTnjn2l)WHv}eZ@r-P?$oB{X-{HkIYKX9Kl6MW3~u=fJ^Gu%MdKz%9zec>4; zT2?gRLhv^s&&7cLut?FgDnN^l5C_670Sob%qPrL93c@M=2%gsG0N~?JkQR=g01kzv znud=6d=K6qj!73nY`cOU;n)svI(S-#8Gwzuv13QTuevk+ECy`Y6UGe0mjW&TPurLe zxTP2L4TNt49OcK#QUEsKSw4nG0r^}8d>aVA-2lfF zX9dB16&&XS{v1pQ*#-Fks(Qm5gyS5*?jg_za4ZL`1uuhRin*aohjw8wkNdJbDR%0| z>gEmjV>ltLApQYBw+L2$ip~2&Jt4dmpc1?f947(#4uClT$GrdtgBQjppc;=gfIIQH z2(S{5DRz^y`2K(!z|;Pvco#hFqX&R41L3(>$e&^j_$)Z4Ki+u&d{9o;zZOmI` z9mYEtaOAbXl2u+C7*JqnQ3y@ghZ=`A24an%#SLf#w1kZv=SXTmtVXcvgldSC1jcF4 z`+En2bvUpPa3u_E5TPcRR9#3lLyABsIK|)pIsHJ4@tdFINv`}Z?&t35-TVGu_v!QO ze#)U=OvbOkYZtx^UqLdSDLmuy#OL5;3txrLqupHR6?pg+%;PTV3f_#Q1{%;ua-ALU zb@UefXYkxFG3T_ehKE*?`BdN+u1s=q9o&ZY(|-bUU*X^E@N3{TNUm9h>cVeh)G{%2FE8IKi~kn|DWx$rIc(!!77**7N7 zyBhukE#*|&HYC>(?nW{;;hSj6_1}UQZcMIUfPX<*GP@t%aT7I6#<0B=US@pbqZ+JYazQ@>44@mcs9D&l7_eOpp*!b85pn$fPnO-Nps3NLOXehvH+ zlDXOkANp?6z5~97<^N)UIAZkGA7z* zu<#@9L1ap}>Br19?J9h5D|;^E+yURdlQqN7;VE||&s&D4wvw@B;gv}4W$WQpKjl3N zz6z&E>P>jq&ysmo;3a4ueM<1RceAGWG5q8`tQYHh0kqnjb*J5d|3D62xtCnu$No&8 zHSkrm7C(g--k;3FrSOUes6*N-@HLHb;%D%_E^C1|;0*@X;k9poe@0SQ`{Au-vd;*w zwz*HxUWKQ*>~Hj61%HS3)BXxv<0W$km&RcN%cJ@Bn zcff5AGB&&eXGqrc$cOlSFcM#dcPxApp1dO&XBMic$76+OJI^y$Hq`bm8XHzQfEE%4S|?DO;y`bZu-hCloTdldJG%x?Ca$C5Gh;a`#LtC`1n zekA>c`CszBg#N-dlA0HuH%K0vhnFJhUx8yJV-RMZSkxH&Jd$}R!|$Uq*Aza34E$dB z9+LalNl)?`pWN_N2J6urI+Ekc{C)_~l{pycPHa%FyR2 zSbm1F;e~%dyE(VB4<50X`KSFzc+7Lu3_cCdc|P&0;kIA1uC!}#>xj>r#|v-&4fBF; z!aYdpa0pL-nfm1Qu7Z!E75LromT@uIqUQ++QPgE_o5d5gJ-7XEY%DDq;6;zzK$-(&)}2qC+qSQ{3^fY)@a`V z4_Zbov#Y@a&Xe;0NlTveT8=6?T4_B zGH-4829lrkjy#n2JxJz6SYLSIaN%2rElXW|)Us5T{%hdjk@gv6(o++B5q=%XIBW2xQ}{k!h1-yf zS%Z%unTLV2agJ*F;|bT{rF}izh@}4j9+hQ&=u?IrRK^eB+pAbZeEN)Ksh3d|KZYyM z;x*$lu)FXB_$2D{bC&$Qj{Hr|nnx4LnfxuZ3+3-HqodNhtd8>wi20dG>JTUG3Jp&92sU zy1j0{JLrzP6Mp}l)-!rm&*^!+psPmRXc$eSWwZ^=aEzYOHwMPg7#U+@VoZ&hF*j1C zVy4ZEnKg4}-Yl3!vt*Xdidi*lrfSyBhS@Y*X4}+E$LyJXb6^h5kvTT`NFQ@%&drpi zSZOO`Wv!gW#~E5ht7Mg}idD60mTJ|lhSjuMR@>4n$Ld*qYhVqnku|m^*3_C=b1P*l zcG}L^SvzOv?SfsjOLp0=*j2k`t9IRP*iE|;w_+`J;$Ga32k|f-#p8Gy&*FKUDsumm z>DCl2t!1>VR?tdXS*vJOt)`7SgMPC{EhRI9Vs>N zd9BDARy9>?Xe~|CdfGr6X%mf~ggWU?wv+D^JLOKbqjnmdR!8gfI)l!Lm7cN6X;wJT z>XunqwcF^nNJx)#j7Z06yy`i^sP4DRgeWXwHnVvGzM%Kt1MWbv~ z4VCn?NKTK`j7ZFkw4_N&o|KeHh)OzIB%?oPYTK;K&ARyRDO@DA5rl$ zsy$7m=c)2C6|PdffREht&O)npddz9JOAe&TC%XYkFebF{uT x)cB@Gt%EVing^H>@0Y-U_06%iHP*FHJ&vi!4AuCbc8h=CD|q;aU;igF@GnZ2BcuQT diff --git a/_tools/pyDatView.rc b/_tools/pyDatView.rc new file mode 100644 index 0000000..5117690 --- /dev/null +++ b/_tools/pyDatView.rc @@ -0,0 +1 @@ +MAINICON ICON "../ressources/pyDatView.ico" diff --git a/_tools/pyDatView_Test.bat b/_tools/pyDatView_Test.bat new file mode 100644 index 0000000..85c4819 --- /dev/null +++ b/_tools/pyDatView_Test.bat @@ -0,0 +1,4 @@ +.\Python\python.exe -c "import pydatview; 
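The new _tools/pyDatView_Test.bat above is a post-install smoke test: it launches the GUI from the Python interpreter bundled by the installer and keeps the console window open with pause. A minimal Python equivalent of the one-liner it runs is sketched below; pydatview.show() is the same entry point used by the installer and opens the main window until it is closed.

    # Sketch of what _tools/pyDatView_Test.bat executes with the bundled Python:
    # import the installed package and open the main GUI window.
    import pydatview
    pydatview.show()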
pydatview.show();" + + +pause diff --git a/installer.cfg b/installer.cfg index ca5eba3..32d14b3 100644 --- a/installer.cfg +++ b/installer.cfg @@ -2,6 +2,7 @@ name=pyDatView version=0.2 entry_point=pydatview:show +icon=ressources/pyDatView.ico #[Command pydatview] #entry_point=pydatview:cmdline @@ -11,6 +12,10 @@ version=3.6.0 bitness=64 [Include] +files=_tools/pyDatView.exe > $INSTDIR + _tools/pyDatView_Test.bat > $INSTDIR + LICENSE.TXT > $INSTDIR + pypi_wheels = numpy==1.19.3 wxPython==4.0.3 @@ -44,7 +49,6 @@ pypi_wheels = packages=weio future - exclude=weio/.git* weio/tests pkgs/weio/examples @@ -106,4 +110,4 @@ exclude=weio/.git* [Build] #directory= -installer_name=pyDatView.exe +installer_name=pyDatView_setup.exe diff --git a/ressources/pyDatView.ico b/ressources/pyDatView.ico new file mode 100644 index 0000000000000000000000000000000000000000..692e3f92d7d9f35410aabdc4c53568b20aeee29f GIT binary patch literal 140686 zcmdRV1zTH9)NK+11P@Y*dy(SqPH}Gw6sNemTks0j7I#X4QrsN^6ligGr$8z0fdILA z@Auswar2zWoa9XA?Ad#5nKb|a5P%K%?*Rlb0N$km0DkDtFxdaJ*&#r{Bl?`2-2b)} z007o}Ab^kWf7;)l0RY;Y=qnliw+#gVuCYJ>3X1<}zrz6l@}7bKQ1to#`gZ`MuV(}S z-s@_s5a3ebqW_9OO;t(%zil`1P-eH zIUi$SSOTD5;fi41_h31Nuf}m&zw2z&(kfFM%-^KtSF%<5+y1_;zxn-{+eBxOE+{4c zg?Fa}SEb>Nm-IJw8kQcQ+!h@`5N=8_BE2fTYBoFjI+b|+kyu69?)4Qv1wdLZOmH^P zURim-|DlVQ2^NX_{CmOwFE8v2LG^gL6OEIFig6Ba-!f6*V<|Xur|1{&@9(#(7f-c+ zWpC3`Z?%BbzEl@yj&@Mva8`I%2W28xPy$3lBQq#nR5^-CDzD@?8hKmr$ z?|oqrUp=@={tTg% zHnrdVgdg4+E%)nwQ|88{`6x`C!$*R4_Q+4CvZ6w&tD__2`}~14z9k*}9Nlvp4<_ed z;g{&10qC9`e(0AS=4E6cB8)5j!Yd8h%k|4NRXJ3gZW3Ioo5PEQKHRDt(Kn8cOc-1H z`aU`GL!J)}4UwV+5!beQjKbSMcX`k%FN4qw5c>e+H{(S zyEhu=k||U;Pg#6yTCZTB7(_VJ(#O!Z_}-Y9ne|UiP4)Y=hXy!0?k8bK_4EOFM#jec zL|?-rtMHzHHV8@-;|NDJwY9?(>lg1&s1pWM%4h#|v45Dexl3?BCh#GYW8gs}vd1}| z20iRn1&zw3(>>?s=e6aA)nQeuZYUmDx}ClKp2(|T|J&i$@$knBWV8GFtVlLDBLn3*Il`(rx7FmX*Iif<@~m)}Ef8R_LJ(YiMY6i&EfsiAhQ#|6TC} z%lDzxYCe-LU?XDL&dx6E`=moic&B-H<*VoY&W^XE0uKkHrw5aEToE3jDZYFZ_%$6J z9c?Wwh(B9fTPN2+o#SI;V|gLcLuSc^3V;sn%LzrIRS{v~tD;f63v8-{fl)m&980M_ z3!U(A=9KP1<&noLhx!(HOYs~PTKY~_R-23c{r#wwgR94HP;fO7T2>O%eMGV$mQGIp zRM2u-C!nOKAY6|bPN=GW;hrK~qi0o1cM*3`#04rQTRQR+kd;Lpq6gz{BJXi6{G_V@ z|GzFKpLQeq4a#TlKH1gZ$_izV`YY*?wOc#$KQ{6`WV>D`6*YqU8jV0a^9*VvT)ezD zySuw|CdS5hGnRaa`o+b?>w_~wchRLR?tQExw!DC?YSHY`d^eG7dY<&**Ucznitw3h z<)q3N*}`WQNqeSvB2YV8LM#l^&LCgMu&~Ga{{($oxqeq^(NUs6{-+EIsE#0{mkC+^ z`Sa&EdOY+MvreQot#)Mf7Ucu$_{`=+vSG|V(nAfzA1!lq|N6B{iHiDrqQ!$ke?|rR z`CX@ucFM~q;sQMgevWLl?&TqCm5a^T*#*(auMC-W@-e446 zb8~YORGGG(|3xcOb;$iTJ!)FCZpHn{b1t2Kb@U(s4aKqC0@sYS>cHfQS4 zxN!EUWO2$=#H$0xhfxHAnrqluBCOJkN7h)t3|d*XW~mingF#LhkF8Zstgng}48rE~Swm1wJ421!z`1-nckA2z^w>D_6$r z(-5p~HyX)g_keqo3+Jft@bFe!TU*yH(3-WQ6S74*C~`m1%=iRkiEq~8A`HeuzZywM+5z{=HA9If^Y+-FE3{+<(@LcSO`l@>?Ut`d}An%xym*d0Ly1 zk+FPm@rjln6Ob_y`2~`gVAV3>7P)t5sy9_vJSa>}Y(>6A_ts$$w#1d)D8Op+T2pI= zUs_LvCV_t5vfj5L?Mbr5$yHEG$n7~A%gi4Z-I|fu{c5^q28^J0o( zRi;GDx0H+@Dw24Wlohk@gJY+t)cHw@`E{`*|FYqBcJ+`N7$S9t5)boCJFgecg3f;0(}A_^Ks zA|ol6OH3B{SK(Ezq~8tH=P{X$@W_1D$G1S`-rn9QD^Jg}_5VlFl`F_=V; zs)25;qotUH%P7DoGw+US6hMi&hWk2V?(y($^`<`Ks;-DsRe;K?0^txK6ObgD{j-Dc z4~JP!hZ%K;SHsRJQ34-{RqA%ATlp0z#5!Pjn(J$v}vPty!Mc6&i%W^Etbcx#vPBU_LjDp2=(~g`6UQw z?s*L^MWa`|v}9?G`#f0*8&!#Fb*L2v}Zi-Dz-OdJ}ThPKchlRPuNZ>-EO)>hQ(n)eVvmTh!cF+W`Jv1 zV4PU%ej`M9nAytu(Xx}TkW?&j=|}|ac)BLsno)znC!II@qqBo1AJ>{du+05GzrU=L zznH9k^*q(z?bUYmJl{O7nXe&I%-4^QZQQ)at8`De!z#eB8RttSRg;zoAKK_z$rh9T z2ubZA9;k#+C5|ajBWA;%PEm@^ut>BySD7Ad{L^Q%#;Iw`U5s@u5&>u$e92*(1X!_nrPx@?ez6AtEm5O0a~9@BnVke*hk)UV?Yoo{)0l+V-a6>on@ML?G5jG3ep9-eX(;GGBPW+$4qWE&;!iga{;lK4%2!B;*P}o 
diff --git a/ressources/pyDatView.ico b/ressources/pyDatView.ico
new file mode 100644
index 0000000000000000000000000000000000000000..692e3f92d7d9f35410aabdc4c53568b20aeee29f
GIT binary patch
literal 140686
[... base85-encoded binary icon data omitted ...]
z9dHGI|0}THwc}E}ieFG#N>gbo56X*pqx{+YaO4GV|4WBxx9L`Virc1F=_(J(pX|dL z5myt}64w#e6MrXeti??{-%Q+62e6bWL$94bd%mh-T$Qv?`yX>1E<| zq66_y;%(v`q7(5R@jmeZ(YY2~couZ6`+~wK{JVtS??}8!v?pFAULwRF@kRU)-=!Q~Y?1kiN7eq<7MX`v{u{ z(u2zh$?#I*TsmOWuW$;lGXFP1vRD11c*SFt)2E2mgk&MPC{L2z$AoBB-bIIK?ANei z!vWV^bIq6*En2MX(xr><@ZrN_Cr_T7F=NJzJVC#8PtO=;1;dWkJAAhre**)bjehB=O~2%2(SPHO zH;($~qmR6toSaHSLqktSM@Jz(K2Cc;AUP=sv}GiC=5O88GldBa4LQMgO8D+p#xaud zNq?jZU5O6~>DgPvtHgz-Up)FfaXS4MZyzKcBbpO!2zyz-+m`@#M9-#>i9gb6Xh z!NJF9V`wt7cWkBz$3^XFHCsYTB2lo?7?;nKf0hNX4 zuD$l!w?~c~=}Y+?mn_rLQZ1TtbF-0`Cy3rmB7^j%6KNK5`CIq$_)K9GR=g2E($Z2a zy-=Kt+lOhWK2*Kaf>1e>ETs4M5Taf6-!;S)#9s-?QSy{rFCtE-`@C%(HvI}K9;n>> zi4YG|52{Ql{#yz0;XdM_-~ayikG%Zy%RRlly{l-uZ^=*c%+JpuM2qB}LfIyeo*2Z( z?m&2uJA(Y25#+lTVLLV4s3(j{dper z3RX_;hAnHSkRQGlFY@xz7*{qy-*=2#X(+8>!-h?|;)*M7WgevKl8f|R^}S>zy_M`F zcSqXKCVK`wbvW`s@7v`;d{ErdtIL`+X>#p~6)Tp~|CvcoPov)Eps+B{l3P|*B4Q$Z zuztZ1?v3;!77T8K<)hkS?fABEnbH=UrniIp%(fQXX0(Ir)V6S**cMJ-x5eUNZ7^p* zJ2*}2fw1katTz(K(@e&lYo#GwP@2n^FJE%!op(wWqFM5huG(}f&yG6(b7|*m)}in= zA5?BG=JC7hufP71d+xdC(idNR@vj~p9`jVcsm>}W$g^md%u|vh;kIHNW)FD5qGjC# z(b*0@3)>-Jc{>EHYKPFZ?GWZ%gV1&D5WJ=xcC2g%za{Oszf3!9Adgmzl5UV^v$-EF z{nJVp3X7y$xlA*i^qQeMPP`;2D5yWvH#Y4>l)LJ)=x`q_8r|8_>ot=)VAG5?@LxuHNn`ZJ zc8K?AizKhMNbzZdRNo(v?A->5o^289-VPBP$P1@-@LJFgYp1-9#lxS&ydke!bwzGY zDvB%~b}{XRIyji09z#%DoA5#RKVEJ~A@W zRThr0|DZn5uYS;uz7ge~#r-T;CPjMz>*_3o&{yxQey?EJu(sUasU5ac=20#$;`_bY zPa%T7Ti-`+cykoTJ8TUDQihImF#rbVBT=RtR!x3lHkSiji$Gz4vnnq`zX-hsr}< zUM`d;@?qSsMm!^?pM}Qc^Ec$Pd?OKsNKZ>BchbNKb z`7|QeKZ}4Rt+8oJ8!Q^s2Gjezfh6uHr96lS%9HX&KQ8JAT=0WFNE%Y+v*;g&GB41+ z9v0s>ubz*o-CM$GXe;h**%E<^pF#AxCy~76ab)j!423bz;`qTr(o9w8d2?)?)dH(WwZe>^ zt+0GjKdY}%Qc`T?NqJ-4H1&cL&j{%w=PLT**+ri`7VSziDIpFMKYJ1LdOd>;qnpEb z##0Dc@;G9hA3>^TQ||No2<87c4pzF8uAQ|IA1ZL{n^KhKanI-MkLY_p1}824)I2kX zE3NDDj84dpe3W}_KZ01-M-aUHarn+|2A8qTF~9FK`1+$~5gOpd{1jVxQr?tD`UO${ zj2QVy~S^r#Zxi9%RjKR-;;0~^ABvB{wP8gJ&c$&4{+b`2T6Yu?kD&d z_p=#5ddhK${euWv@-Td6 zK7vhSpTM$#Ph#T7Ph#%y&zPqID^JRs^2l~c-v}4%knWSWZYpEad-_~KR%Rw9bbAHU zK6wnQ20RLnF-@>->H`R#cQ2w>-oyRV??uk`2f4T6Biu)?C5|1)tF6<>2kC-%;Q)(n zmV2w-J5T>c#L~4H$l>t`&p;li|zC8h1Zk^uxZT0SUK=fO#AdP4Eg61NQ#TH z@}#^ek1PYu7mRpDMBV=jeFRm|N060{w%MpN^0rnKGlbRfJMKak@gb|S3%t`jBbjI zZ4V&Mlx|)Zg&Fv3Qds}@Bk9s??L#=JK#V2R(MXh z1#Tm6#ri>az^TvOcG(h1xO1U!#3|;L^$1n?elJi*TlcWZNv@O@YUb3w%^TI(epOU{iHEQ zF%SJ;f5d8!MZa`Yd1X1!xXcAPSgr=LP6hjZ*8at6|3!|yO(-TT2I)}4)T5K424c2~nEmrlq9?L$z5pzDe6{Fw04Lw@lhV*3a)yBPO zl}F{3bzt`kLOdg3-9K67O8l*?thCBl{}=DZgtu8ICWcIUriRQnt8c}wy+gnO1UZ_1z<9)S}F`m<~{PSzE^3%()yz7-%^wCwA`Q9}c`Py~(?3wEj>gQ?Y zO?g!QsQ(%_y&wn6)O5A2qMg3PtUf@`=GS2S>sMjfhgV@^k1Mg|%gf=``wE1w9e^YI zOIW9|Y+JN^19R+KnB&5CHNTJY-VxF+9gwVbKDwXJ4*n2M;q~6p16Ek+MrryEII*o= z^5JDz(B%qDf2R?Ky>u12Hfw}1f9`F{yeW^$E9uvG=><7hrbek=RowKmX4U^+wz>sh zzjOtbesDQlyI+P)1MWgz^hTCj(sTR>aPkCTPW}|UZ%!N~U8Gg~v3Nupk00hc$A21L zap)cHtNjbzV2je)n&f^44V-^xWm>^62G=kKo>|wR_&2!rxEkIAZb0zFClI%&Jz^KUhM0LTA$sl$h@SI2&$XDx^E`h0m8aC`w)9)H z*Pc^04@2Z;k^ZBKn?43D9&B7V8GW1o4RhYP7OuT-f$xyUh+WhH zNlu-SxcptjFL{f+c!PKyu?t^w!0X&=enpM^RUg#i=(l?izTi!S&UqEVvtLEftXHvP z=4%$V&u)*vxg8NS|9$vPe*Dsq4BSc~uuAuIPlsWp5*4 zSw|!+twH>fj!0hL8z+y6_QR)8#r?J;7yT0v3;&5Qws)aK$o!7PI|y0$KEjrMg76ie zBXn7JtYhDI$>;ZCT*uqd{n_i`vtfmmC*@6fWZNHp!3eL7i1d$F-$FVj-_SZw|Dpq? 
zcDkQ!^P}+lx*0M&h9k>kC^Fp!BHg7I`S2N1)_hD}d`NUgGIb+qCG~?iv^&3+cGU%k zk>fuLvCBR{%+e1Ky|go;mUcnp(vJ|iyep!ddLU+9Kg4brObo^P!B1jw&qwj~+Yg}6 z%MYQvwB%Htlt)ox-6z`ROLh;_TliIHEdH_`JjT9?v@H`@=PyKI&^#1upMgBz@yOXa5?PxElNbGw zv9T{QT)#v*d6MHhkPurhvYD^gBefb7l zMt4NC^KcYLtwU+_T9ihuL~+<+6b8;lzW-$8`HUqmMj?C4Fl2iSK_+=}_yETM*tVBM zuS16W5Tv^gWf>ZQ49_vh^qz<;zZuBc!LitTg*a5+IFZu-<27FT^W9;;&)tSvO9JrxT1pmD34i< zGU`WV%2s^0w+tnbE2%H5EWIg<+kmnJH;$Qjq9WB773uyQ2iuO4lMO`xtMX&-y=f6RL8!pL~8astck} zogcyd>bYmV_NnI{^}92-5x&@!?!*1dxz~IKf0IuZzqn8R?wk@OG?g~nq%!vRZu++o?vN05;0bMR=RJzKXX_8pwV-DQ7-lc`axCOK(LU?BKlmE%%b571_-(ysU99Z~w`< z;1Tkm%#jE7-V=uo9Yh(|foLB(?OT}}AHsdDQ&3%=Z|zliu$p^S655AW_w-C*5*Rii z&64x6&uVw zI0KLt?q?xA!k_cKLDoKgCAsOSuBx=&sT)Tv?Hb$N!S6Ax$^6Y6Y!feG|7axZ#3Oc{ zXy@N?Pa6B)na-yRtGY#TQqR7n%m-2S7i?_*LNky&xSMci-E>0oQr;zh(d$Ut*?udm z-qE{?gFHRXF%9IW4vs-QOdVay??j5Ori~O$HZM*mfBU&DZ|c-O%6lQ}xZgxM zKOe^PyZJ9{pWE}ikbTEww%6Z_ACjvhbWdf6x}C`I^GNq=Y|}637{>YJ|3be*9$dn5 zd?)$PjNdKKvd(;#bU#U1-@$V8n=RKDnh*8y)pMYp1N9uJ=RiFN>N)WL#{t`4zz^r| z8#K6uSEH4uhPD?B+ux4uSLd=~J#Nt8(qQX!>U?qS^_k9>7T1Ko>_F|Asp+`oM9q1W zoo%M(%c#4iw@mkM+!TsRc_)mLgev0+n z;cVyMB9!#lXN%w#sPW1%3N_w1cv420J&PccVKKmRk--;Q=o<8nWE}^r+cm$>`|mx> zZ4=@{!uAs}`hw4%F!K82*W`e0?_k-fFd^B0=vzX#q!zLTvZ1lkp94F*&UQR@yta+W z`P!M(&9maSZLRd(Cwm^t&WPvB3E8vB_DYaFo+D)YBsku)!|FY~FWW8I2PjV2Fewe& zR!B72^eA8FIuu59>pk(t(LPLhkUf@cMl~)go59Ayg{N^~H!Ci-3CeWEkbh4_&8i1@e``u!oHXCDyn5$_QHB-#_N z5YG{!OLI*cdlns%vu%ea8YBnlM?<3Sq5IOCKM>MI>8;+E%$0ZXS!ujMyiF*tU5V~Q zPvT3W@1>Vs`W5ZHCOq=UBlDXzYqng_Z{8cPXAIMa=uPw>J|jeZUHHx*zAG9&B_tQg<8`7PAvr%xG$vHuR34On>9us^bo|#n zJyRIfF?vVuiU;C_ZP;sU-_2|q+`-G>5$5gXt<4# z9GVbM&@T7cciwqtYCwShH?$iy%ooSbEjwgGTRmPUBQqnxWTeN@elyPc&3k&rHAI;V zo27SXbDC%v#{tGQg?50_PnBcw>MlaESKe*i5nrX>%Hx?1TLz-x4}@KgRTiX&H}iOK z)~s2c(uu6BOwRG;BA+&2Sy@Sl3iHC2HB+#9dJin-x`PG7Ud4i;uVC@0x8O9j8{Aip zMR?#QhE3pmvd7J_WD*}AYZ&Ji+Hl^_G;So6Z@cah-yQR=Jf7{a>teb#W684vuOXobd>j86C1VNEE?4bD@V08JJxhF zQCo)~dQ%&d>f736?r0^OOOqSg!pQzIC!~d;eWpp<)*1<0+L+)C@0wVj39?x>3x>T; zopPrfB+oqBSR|W>h=>{6wrx9`p4(abS!a%WzeqdIC*!C7C&uD(%YJfTxZZ$&>a6JpR#C! 
zXxEm;Z&4fZ!OR`>rb$VRlHDKcsT@NZQb~jA%U^VGuA#H+Mv9AyDBpZ+TsGCr?bFJ* zO=@L=mOg`c*C$P;|Kp}2?F;6=+|ZtjOS216o;TU--`T=^yXRT{{?L3=6>DVUr{@RC z-As1S6G+(nqzPWx9Gj=LGz$l`HY+Cg;~MRJD=nqT^=j&uocE#mK*kY8{Xr>hpc3Pv zF{xV{tQ^dBdDEJ4&E#W9+}MO`shZNJsyS`%a=1QS_IR>4tKi(T4|W#K#L;hxYwXix zpGEA>AINoAk0Q~dDMDA$4r9iXST(FUCUkv}cuW~s_HsP$6iAEC6+}60 z2J%gLdTWzI`?2^<5161O55Q~c!)D#^$ISH4p1^{UpBvf@5+$-1(%K>QA~VQAw;8ffoxxY7ug z&7*#QTWVwjc5tVgDUN@{r29R9$TfEvf5zc4?r!5e_yIGgJMF&Se%xfFC0l7KZSud{ zd1an2QHJX{S81(lj|mUPhz|E-N%woOZMx>4&>I@ zGtn-=&_+XcT$cSxtqogg>L{*Tm+jhJ2wZS0wobSi8;9MBHT@f7{%7}KXoq{T&26of zrqbs6rRC?7f6A{R^B>2v;aUuMIWIG#+Be4X?spjX0XG}3p*I-6(X>UHa=nRIc(aLd zZHwK-A?AeiNA`w?s|;;%jBGIU`vmJI-9Nl9&m;v5Fx%(f3hzlbm`y{k<2v8#jZ?3i z%)G9*VR-vH%&O@ltu&Q3^(X8+^Zz@pXDFub7_Onf`nluGm^W_5%Fk{z?tQN}TL)fk zdEjIVQ zs>UX0@U6%RTY(eb%I?dyi>b8*65lO5Fy)_iK(YxsW&gu4q@|kc!rUf2N4uZPvGn6B zG3))SG4!QtF{SrAR+>tiWuWkU^3QhbV$Rin6Wd^tUE0(_hH{;3r zCof=zsZ8S9%cRX_XOg?1wU|cOTM(wT=jNnnqaDi0w$(`shGkeA(=e zSlHVN@8eN5<@$ZxMlBf?YQiwYHg)>&ZXT<@T?bT zL-GOwrace;sV~&nj?8!in}#&SvQMwaq&Kg?r;q;;?#t&`X)0~z-{}|Rzdv=*S{Kc= z_-4=xw`2YXHyO{b?lv(C+nbcNUAgZ6eM1`v6Tjq5#4qk(;sn}@a1Eq6dHg%+7ET;J zXku4&Ghy@EBV=xS6EvrT37qpL?LOW$A&WnP@3hy=%AR+bN$=c@Zcklp(i3B?G?g}s zufCDb>mdJ|P>)V<9fH#JNUc~=Kmtbk!ss!q{l~MYL{kgZ(1SAVdR7Z;i3Ngvf3${9>pER$k1(qTzhj0CwaS&47hnsZIu_lvi z=(Be$JzxZJOJFyNuDSHCaV6u~AjqB7t zCf;)ac4h>cU0H!#NH+*(A$VVOv7*;&`H)iZ~o$p*G9nh_aeeoeBs(Et;ICiNioK2 z@mSg-Z#KIN;>}*RWvX-b7DloD569lTP*a^7Z1!r6dTxlR&I>cu`CPMJ5Nq}n#+$1A zc%+AUn9U1DBQD6-ic{;hn1*4$Yc<>cU!?bSX=nbgpiZVyH&3l~F6G!q@Wv&`2-}M4 zk_@ho);ei>-E^kvol`hak;&&-*jJv7Jtg#?$9MeK%|}jZ{Hgd9H`~kv=3V2L7yKX% zH&eE`k}=ov8LjI(uy3!)j14sj0d6Qsk2YKnZ4T}(!NENxMmV&Gzjv3IZ-~8ID_xu( zV`BX`nxrs)RPU~`;!s?QlXZ0_d9U%lU+rLUr{8rIe+rRaoydS1KJgK+G#7F9r zC5zfMw1#Iv>uaswqElg{FQT2g?#^!vjd}jR9;~bHCoPj%zB8y-$3=_ssJyC-=(j*R zk5g`Gie6a(rrQ_q2V z4*XAXpk{%Qz4WL-10(83HE3`^=UW;yDAt)PEP{1r3u&D>8+Fz*!xb~Q!dvn1{S&(8 z?0U^ID2KD|!}@RivC}`(*@}nxJJ85-YlMJ0n$YnZzPqygH zh(_s*(OlJ~b1@f(>8#`DpVbK7O0*;zTE7}J7=1Y}{xc4>g@WeF1kF=w-t^}};RXAB zJ3ihz>tUz$dxo;-%EdL!`D?EE8bW>1>&^f@)A|O5)%-R!)LJv3xa|2><>hpT(pDJF z3EOK7Zs%`}b7;+i=FtB^JViWBXgz?9=C${l@vQYXS|^~jCbk+VzoJ9=5D)9-LD#yk zv^BqdE1~&%rSm+|o_L4oOngLiCAtxx)k42@PtV>b-X>ln+7eF^_YpS|;)%Y!B^#w_ zANnoYwZ1|;yps?e&k=7C9}?ZUPNiSVmMvEf9yBOw`SRsO?(S}T1pOX7cu*vttzekG z46FQnKq%j>iAMV-+ue8H-Dcgo zb!izHT4TYrBAJN@@!x9wXigqp*6XapT6V7>8&PPo;}z zl0P3=hj=2{{GRJ*{`k^MFFg_(60%eN=(WbfcjG*)7~8?}yRG#H@;xnVonPyJ`s>#? zpE4CW^2g3~BOxKd6>ZwIX>!$7SN(x&4@6sCxm$_W{nM{szlN(Vw?WX_SFx4jo~tHzL26QjwFZQ~j&ry- zg82C_9h@IqCO^5^S!q}?p({4g-+0u9wm4K7gM!47C{2C}m6=bYGK)Tm)98yf_G8X7 z#v{r5J$TG+i)CNGi_G){&F{&-;*x(Qaefax$objh9IArL!m(I0ioTtfKZ`W)e{d{j zndXzRw`?P-%jg?vXA4vnO{Xu|5*(?HLHf2Q5xV*rIFD=YV6M>nZKZ`9BpG3sUrpWYbf~xFKG;eCn`+m2N_Qt#Ed$?v^75mF((_eK{j(awR z$K-!tW{;;45$MA_6e$mLe@^23M!J||qviZwg7cg)^jp&uUgPdZ=z_cGFXcYYGdzx? z`%{@FeFW3*pEVCkew?76$cprT()aBH2wZeGHjljz3wt%i(s8{lABP+-F6FqTW0^l~ zc9`z1{Ps0Ue_D^1;neePY#nhcwokqhu`B5}(5ETKj_8}wnhWH-ndZ*;TmDK@{prh? 
z{uj4SxCt8u-GW7*--(f({$cq5Rh*>j@25@F;U>SmTZmu$9>mBF_h3zro8UR{TKEpX zihhlnV{d5`=frCL|7i|M&|Eh2@!h@xgstmLzlT?0RnKd%;FB9M@Woq@%k@x-lRQve zeAa{Mz98E9U5X*kUyHTfu7<~#SJFrA=lK2r{SAMw`4i6l(9fKew$hhRHr})5gg7Uc zLf#kzhl)$mm!8@>-#wOw67={ z`)hG{AARQ4`a?VR9p@nmqv;()_d}I^*0o8{f**e`f3NxdNABo zgypwrV&`YD;v@Q|9NQG}OaECjr!@a%#1PruV@}N@l_hNC9MVhl<@yo=roW8sv)+X3 zpeC5{?lt)G`Nr0q5$QTX{Zk!$&VymoIZgnNC6lp){!%07zln6tL!_+vMDs#5ezKfC z#PP$F1?Pd%w~R*Q;&&0g=skqf2UOImUhtXL9y30?56j2*(^!;z944JJ;o1O9e}v{K z%IGhBLCc${kGcj#gw?VC+GX=hsb>-%iuI@o8Jvn zK5dEY)I^JZ)+vL2Zu;c^H5~If$T1&;Z*_*}^lm7L;aouMI%_VVAYiIBKT|=z%O^Yku}Y{Trxx75P&LbXkN%pJmvU z9g5xAftDYZO8V=mO!2^;T>5Cxd}*pT+k+kST^xc~@8#IKVmkJ5TwQT+{PN46op5bH zjzwO?@l;=p{eSb#0s43Fu*Ner-c~^$9oex#D9wzc&y75eMVE36x|F_0@=(Hi z=~48%lbd0M)jO2qcG9l3n&*8m50_FGW|I#m?eSRohNvtpKzTuyh02nA?Ax>Z)Hp0< zeu8w*pw9pPyc4eTBIm0fr%bjmAIHTD<)wBE)%vY_O!pYWZDzXafBz~6>+x%;&z)F4 zCo#R{0%bIjW%eD)S>Gr1M?D9AHU~7S@IAi~ia8t+%>Makeizx$busVrd5aT-!U+fX zEpa;7&vjOK;nZ+OFpbkrV}Yg*)a!^Qggs92p}tc7e-C|&>w8f3hrZYJU8s6Y-=|tX z?+D$~GlfxDy>q7T9M!{)-O<;#MqE-l zj)(HC`>F@TT7AF2LcBwKM0ES(AOHB}4L95{Lg4S-y!R>5h3H7UNIXd>9krLDM{(MR z(i5E$nex+uXwPrmPu_UrjdlFaD&jt7C(_bV^(}1l+sSEV5udGPm=76NV*<^Iy9tex z)Qw;K(RZTwt27!v_~3(Yxw*N~cTtAPNRKxme$K|%Wv20QnPGx`)|m9vSoW*ajhox1 zLhkwYCf~V@5dD&gc&#`c4_#l(Z_?kt`|i8XGLHwex0=`bX=d%DSFCSY?W+{%)XsQw z4Yl*+m(12R9>tfsai2IvV)ubSMb z>BtIh#XV}C;TnZ!xQ6^$`hFW`f}KCYy6K-FJ2TnP?xK|6tQ!A0>%soajKYEfvvJXA z<1ww3N!{iBkc7Ib?Sitr4sngCD zIp@*-%>LT`gy=A{@XM#Lea<6D+xigK01UUj^^Sg%%k>vYT06_VSXOY&X%qO*djt#m zG&4~_e(GoM=l9+jOEKH9w*1b4>-@21b)Wm;Kj{vxow?H#MYZBy21Wdqsa>bYZ@SW? zPq-H7F8cDn9m~GF&#am<(DGf+Z<$tSqJjDE$#xhE2fbsQd))-@q1PgK&Vy!Gz8CXv z`%K?$tpPr;GuG@_)RJp8uc2@L8_kr@U$lI}GyZO8il2EOz+`ZHEy~xoZ`>Ipu6;oHc3|Jyq{jqvU$lwpe;ci@DUDdBbv0Te zWQxe2l9&yqG|3A))=WTbpqJ*iX=9V8dgz=7`&Db$-!s{XktQSD)6mDD*NM8iCg{Q-r^hsgWK4?7t zW3A4B|x2XLTt z9*(oUJa#Y-hpILrZsUVkHRMt5^RZ3-y)_r|ql0bmLNBKoaP4;sedssl`n~nF?QSK< z6}Dm9tOvMf%+1)eaGchw&im2Jyih&2dBG@b{^}l7(l71d>U?W2hm%M4;k!Ng^k>Vw z4SWo1ruNs^)6&ztKP1d+JvI++s`bJ2fBz<`QnoN35eS{v0l^F2!^$t4(r5Ehj-ls$ zRrh_)vozkdtLnJdoZcu5S%_S(F&rCOgt9mnqW)K8!|K8gXwY&V{S10?uK*xFcPUD?kLX9R9iIZbnogOZ9^|seW$g0l_h!DQ8#MSo%Hz{l literal 0 HcmV?d00001 From 802d7933f043e1b41b3aaa3c9feb18d0f8c4f915 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Wed, 15 Dec 2021 23:06:17 -0700 Subject: [PATCH 16/36] Misc updates from weio and welib --- pydatview/GUISelectionPanel.py | 2648 ++++++++++++++-------------- pydatview/GUITools.py | 2153 +++++++++++------------ pydatview/Tables.py | 1254 +++++++------- pydatview/common.py | 910 +++++----- pydatview/fast/case_gen.py | 1179 ++++++------- pydatview/fast/fastfarm.py | 972 +++++------ pydatview/fast/postpro.py | 2563 ++++++++++++++------------- pydatview/fast/runner.py | 362 ++-- pydatview/tools/curve_fitting.py | 2766 +++++++++++++++--------------- pydatview/tools/signal.py | 1243 +++++++------- pydatview/tools/stats.py | 407 ++--- weio | 2 +- 12 files changed, 8376 insertions(+), 8083 deletions(-) diff --git a/pydatview/GUISelectionPanel.py b/pydatview/GUISelectionPanel.py index d7177aa..a623bee 100644 --- a/pydatview/GUISelectionPanel.py +++ b/pydatview/GUISelectionPanel.py @@ -1,1324 +1,1324 @@ -import wx -import platform -try: - from .common import * - from .GUICommon import * - from .GUIMultiSplit import MultiSplit - from .GUIToolBox import GetKeyString -except: - raise -# from common import * -# from GUICommon import * -# from GUIMultiSplit import MultiSplit - - -__all__ = ['ColumnPanel', 'TablePanel', 'SelectionPanel','SEL_MODES','SEL_MODES_ID','TablePopup','ColumnPopup'] - 
-SEL_MODES = ['auto','Same tables' ,'Sim. tables' ,'2 tables','3 tables (exp.)' ] -SEL_MODES_ID = ['auto','sameColumnsMode','simColumnsMode','twoColumnsMode' ,'threeColumnsMode' ] - -def ireplace(text, old, new): - """ Replace case insensitive """ - try: - index_l = text.lower().index(old.lower()) - return text[:index_l] + new + text[index_l + len(old):] - except: - return text - - -# --------------------------------------------------------------------------------} -# --- Formula diagog -# --------------------------------------------------------------------------------{ -class FormulaDialog(wx.Dialog): - def __init__(self, title='', name='', formula='',columns=[],unit='',xcol='',xunit=''): - wx.Dialog.__init__(self, None, title=title) - # --- Data - self.unit=unit.strip().replace(' ','') - self.columns=['{'+c+'}' for c in columns] - self.xcol='{'+xcol+'}' - self.xunit=xunit.strip().replace(' ','') - if len(formula)==0: - formula=' + '.join(self.columns) - if len(name)==0: - name=self.getDefaultName() - self.formula_in=formula - - - quick_lbl = wx.StaticText(self, label="Predefined: " ) - self.cbQuick = wx.ComboBox(self, choices=['None','x 1000','/ 1000','deg2rad','rad2deg','rpm2radps','radps2rpm','norm','squared','d/dx'], style=wx.CB_READONLY) - self.cbQuick.SetSelection(0) - self.cbQuick.Bind(wx.EVT_COMBOBOX ,self.onQuickFormula) - - # Formula info - formula_lbl = wx.StaticText(self, label="Formula: ") - self.formula = wx.TextCtrl(self) - #self.formula.SetFont(getMonoFont(self)) - - self.formula.SetValue(formula) - formula_sizer = wx.BoxSizer(wx.HORIZONTAL) - formula_sizer.Add(formula_lbl ,0,wx.ALL|wx.RIGHT|wx.CENTER,5) - formula_sizer.Add(self.formula,1,wx.ALL|wx.EXPAND|wx.CENTER,5) - formula_sizer.Add(quick_lbl ,0,wx.ALL|wx.CENTER,5) - formula_sizer.Add(self.cbQuick,0,wx.ALL|wx.CENTER,5) - - - # name info - name_lbl = wx.StaticText(self, label="New name: " ) - self.name = wx.TextCtrl(self, size=wx.Size(200,-1)) - self.name.SetValue(name) - #self.name.SetFont(getMonoFont(self)) - name_sizer = wx.BoxSizer(wx.HORIZONTAL) - name_sizer.Add(name_lbl ,0,wx.ALL|wx.RIGHT|wx.CENTER,5) - name_sizer.Add(self.name,0,wx.ALL|wx.CENTER,5) - - info ='The formula needs to have a valid python syntax for an array manipulation. The available arrays are \n' - info+='the columns of the current table. 
The column names (without units) are surrounded by curly brackets.\n' - info+='You have access to numpy using `np`.\n\n' - info+='For instance, if you have two columns called `ColA [m]` and `ColB [m]` you can use:\n' - info+=' - ` {ColA} + {ColB} `\n' - info+=' - ` np.sqrt( {ColA}**2/1000 + 1/{ColB}**2 ) `\n' - info+=' - ` np.sin ( {ColA}*2*np.pi + {ColB} ) `\n' - help_lbl = wx.StaticText(self, label='Help: ') - info_lbl = wx.StaticText(self, label=info) - help_sizer = wx.BoxSizer(wx.HORIZONTAL) - help_sizer.Add(help_lbl ,0,wx.ALL|wx.RIGHT|wx.TOP,5) - help_sizer.Add(info_lbl ,0,wx.ALL|wx.TOP,5) - - - - self.btOK = wx.Button(self, wx.ID_OK)#, label = "OK" ) - btCL = wx.Button(self,label = "Cancel") - bt_sizer = wx.BoxSizer(wx.HORIZONTAL) - bt_sizer.Add(self.btOK, 0 ,wx.ALL,5) - bt_sizer.Add(btCL, 0 ,wx.ALL,5) - #btOK.Bind(wx.EVT_BUTTON,self.onOK ) - btCL.Bind(wx.EVT_BUTTON,self.onCancel) - - - main_sizer = wx.BoxSizer(wx.VERTICAL) - #main_sizer.Add(quick_sizer ,0,wx.ALL|wx.EXPAND,5) - main_sizer.Add(formula_sizer,0,wx.ALL|wx.EXPAND,5) - main_sizer.Add(name_sizer ,0,wx.ALL|wx.EXPAND,5) - main_sizer.Add(help_sizer ,0 ,wx.ALL|wx.CENTER, 5) - main_sizer.Add(bt_sizer ,0, wx.ALL|wx.CENTER, 5) - self.SetSizer(main_sizer) - self.Fit() - - def stripBrackets(self,s): - return s.replace('{','').replace('}','') - - def getOneColName(self): - if len(self.columns)>0: - return self.columns[-1] - else: - return '' - - def get_unit(self): - if len(self.unit)>0: - return ' ['+self.unit+']' - else: - return '' - def get_squared_unit(self): - if len(self.unit)>0: - if self.unit[0].lower()=='-': - return ' [-]' - else: - return ' [('+self.unit+')^2]' - else: - return '' - def get_kilo_unit(self): - if len(self.unit)>0: - if len(self.unit)>=1: - if self.unit[0].lower()=='-': - return ' [-]' - elif self.unit[0].lower()=='G': - r='T' - elif self.unit[0].lower()=='M': - r='G' - elif self.unit[0]=='k': - r='M' - elif self.unit[0]=='m': - if len(self.unit)==1: - r='km' - elif self.unit[1]=='/': - r='km' - else: - r='' - else: - r='k'+self.unit[0] - return ' ['+r+self.unit[1:]+']' - else: - return ' [k'+self.unit+']' - else: - return '' - def get_milli_unit(self): - if len(self.unit)>=1: - if self.unit[0].lower()=='-': - return ' [-]' - elif self.unit[0].lower()=='T': - r='G' - elif self.unit[0]=='G': - r='M' - elif self.unit[0]=='M': - r='k' - elif self.unit[0].lower()=='k': - r='' - elif self.unit[0]=='m': - if len(self.unit)==1: - r='mm' - elif self.unit[1]=='/': - r='mm' - else: - r='mu' - else: - r='m'+self.unit[0] - - return ' ['+r+self.unit[1:]+']' - else: - return '' - def get_deriv_unit(self): - if self.unit==self.xunit: - return ' [-]' - else: - return ' ['+self.unit+'/'+self.xunit+']' - - def getDefaultName(self): - if len(self.columns)>0: - return self.stripBrackets(self.getOneColName())+' New'+self.get_unit() - else: - return '' - - def onQuickFormula(self, event): - i = self.cbQuick.GetSelection() - s = self.cbQuick.GetStringSelection() - if s=='None': - self.formula.SetValue(self.formula_in) - return - - #self.formula_in=self.formula.GetValue() - c1 = self.getOneColName() - n1 = self.stripBrackets(c1) - - if s=='x 1000': - self.formula.SetValue(c1+' * 1000') - self.name.SetValue(n1+'_x1000'+ self.get_milli_unit()) - elif s=='/ 1000': - self.formula.SetValue(c1+' / 1000') - self.name.SetValue(n1+'_/1000'+self.get_kilo_unit()) - elif s=='deg2rad': - self.formula.SetValue(c1+' *np.pi/180') - self.name.SetValue(n1+'_rad [rad]') - elif s=='rad2deg': - self.formula.SetValue(c1+' *180/np.pi') - 
self.name.SetValue(n1+'_deg [deg]') - elif s=='rpm2radps': - self.formula.SetValue(c1+' *2*np.pi/60') - self.name.SetValue(n1+'_radps [rad/s]') - elif s=='radps2rpm': - self.formula.SetValue(c1+' *60/(2*np.pi)') - self.name.SetValue(n1+'_rpm [rpm]') - elif s=='norm': - self.formula.SetValue('np.sqrt( '+'**2 + '.join(self.columns)+'**2 )') - self.name.SetValue(n1+'_norm'+self.get_unit()) - elif s=='squared': - self.formula.SetValue('**2 + '.join(self.columns)+'**2 ') - self.name.SetValue(n1+'^2'+self.get_squared_unit()) - elif s=='d/dx': - self.formula.SetValue('np.gradient( '+'+'.join(self.columns)+ ', '+self.xcol+' )') - nx = self.stripBrackets(self.xcol) - bDoNewName=True - if self.xunit=='s': - if n1.lower().find('speed')>=0: - n1=ireplace(n1,'speed','Acceleration') - bDoNewName=False - elif n1.lower().find('velocity')>=0: - n1=ireplace(n1,'velocity','Acceleration') - bDoNewName=False - elif n1.lower().find('vel')>=0: - n1=ireplace(n1,'vel','Acc') - bDoNewName=False - elif n1.lower().find('position')>=0: - n1=ireplace(n1,'position','speed') - bDoNewName=False - elif n1.lower().find('pos')>=0: - n1=ireplace(n1,'pos','Vel') - bDoNewName=False - else: - n1='d('+n1+')/dt' - else: - n1='d('+n1+')/d('+nx+')' - self.name.SetValue(n1+self.get_deriv_unit()) - else: - raise Exception('Unknown quick formula {}'.s) - - def onCancel(self, event): - self.Destroy() -# --------------------------------------------------------------------------------} -# --- Popup menus -# --------------------------------------------------------------------------------{ -class TablePopup(wx.Menu): - def __init__(self, mainframe, parent, fullmenu=False): - wx.Menu.__init__(self) - self.parent = parent # parent is listbox - self.mainframe = mainframe - self.ISel = self.parent.GetSelections() - - if fullmenu: - self.itNameFile = wx.MenuItem(self, -1, "Naming: by file names", kind=wx.ITEM_CHECK) - self.MyAppend(self.itNameFile) - self.Bind(wx.EVT_MENU, self.OnNaming, self.itNameFile) - self.Check(self.itNameFile.GetId(), self.parent.GetParent().tabList.Naming=='FileNames') # Checking the menu box - - item = wx.MenuItem(self, -1, "Sort by name") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.OnSort, item) - - item = wx.MenuItem(self, -1, "Add") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.mainframe.onAdd, item) - - if len(self.ISel)>0: - item = wx.MenuItem(self, -1, "Delete") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.OnDeleteTabs, item) - - if len(self.ISel)==1: - tabPanel=self.parent.GetParent() - if tabPanel.tabList.Naming!='FileNames': - item = wx.MenuItem(self, -1, "Rename") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.OnRenameTab, item) - - if len(self.ISel)==1: - item = wx.MenuItem(self, -1, "Export") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.OnExportTab, item) - - def MyAppend(self, item): - try: - self.Append(item) # python3 - except: - self.AppendItem(item) # python2 - - def OnNaming(self, event=None): - tabPanel=self.parent.GetParent() - if self.itNameFile.IsChecked(): - tabPanel.tabList.setNaming('FileNames') - else: - tabPanel.tabList.setNaming('Ellude') - - tabPanel.updateTabNames() - - def OnDeleteTabs(self, event): - self.mainframe.deleteTabs(self.ISel) - - def OnRenameTab(self, event): - oldName = self.parent.GetString(self.ISel[0]) - dlg = wx.TextEntryDialog(self.parent, 'New table name:', 'Rename table',oldName,wx.OK|wx.CANCEL) - dlg.CentreOnParent() - if dlg.ShowModal() == wx.ID_OK: - newName=dlg.GetValue() - self.mainframe.renameTable(self.ISel[0],newName) - - def 
OnExportTab(self, event): - self.mainframe.exportTab(self.ISel[0]); - - def OnSort(self, event): - self.mainframe.sortTabs() - -class ColumnPopup(wx.Menu): - def __init__(self, parent, fullmenu=False): - wx.Menu.__init__(self) - self.parent = parent - self.ISel = self.parent.lbColumns.GetSelections() - - self.itShowID = wx.MenuItem(self, -1, "Show ID", kind=wx.ITEM_CHECK) - self.MyAppend(self.itShowID) - self.Bind(wx.EVT_MENU, self.OnShowID, self.itShowID) - self.Check(self.itShowID.GetId(), self.parent.bShowID) - - if self.parent.tab is not None: # TODO otherwise - item = wx.MenuItem(self, -1, "Add") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.OnAddColumn, item) - - if len(self.ISel)==1 and self.ISel[0]>=0: - item = wx.MenuItem(self, -1, "Rename") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.OnRenameColumn, item) - if len(self.ISel) == 1 and any( - f['pos'] == self.ISel[0] for f in self.parent.tab.formulas): - item = wx.MenuItem(self, -1, "Edit") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.OnEditColumn, item) - if len(self.ISel)>=1 and self.ISel[0]>=0: - item = wx.MenuItem(self, -1, "Delete") - self.MyAppend(item) - self.Bind(wx.EVT_MENU, self.OnDeleteColumn, item) - - def MyAppend(self, item): - try: - self.Append(item) # python3 - except: - self.AppendItem(item) # python2 - - def OnShowID(self, event=None): - self.parent.bShowID=self.itShowID.IsChecked() - xSel,ySel,_,_ = self.parent.getColumnSelection() - self.parent.setGUIColumns(xSel=xSel, ySel=ySel) - - def OnRenameColumn(self, event=None): - iFilt = self.ISel[0] - if self.parent.bShowID: - oldName = self.parent.lbColumns.GetString(iFilt)[4:] - else: - oldName = self.parent.lbColumns.GetString(iFilt) - dlg = wx.TextEntryDialog(self.parent, 'New column name:', 'Rename column',oldName,wx.OK|wx.CANCEL) - dlg.CentreOnParent() - if dlg.ShowModal() == wx.ID_OK: - newName=dlg.GetValue() - main=self.parent.mainframe - ITab,STab=main.selPanel.getSelectedTables() - # TODO adapt me for Sim. tables mode - iFull = self.parent.Filt2Full[iFilt] - if iFull>0: # Important since -1 would rename last column of table - if main.tabList.haveSameColumns(ITab): - for iTab,sTab in zip(ITab,STab): - main.tabList.get(iTab).renameColumn(iFull-1,newName) - else: - self.parent.tab.renameColumn(iFull-1,newName) - self.parent.updateColumn(iFilt,newName) #faster - self.parent.selPanel.updateLayout() - # a trigger for the plot is required but skipped for now - - def OnEditColumn(self, event): - main=self.parent.mainframe - if len(self.ISel) != 1: - raise ValueError('Only one signal can be edited!') - ITab, STab = main.selPanel.getSelectedTables() - for iTab,sTab in zip(ITab,STab): - if sTab == self.parent.tab.active_name: - for f in main.tabList.get(iTab).formulas: - if f['pos'] == self.ISel[0]: - sName = f['name'] - sFormula = f['formula'] - break - else: - raise ValueError('No formula found at {0} for table {1}!'.format(self.ISel[0], sTab)) - self.showFormulaDialog('Edit column', sName, sFormula) - - def OnDeleteColumn(self, event): - main=self.parent.mainframe - iX = self.parent.comboX.GetSelection() - ITab,STab=main.selPanel.getSelectedTables() - # TODO adapt me for Sim. 
tables mode - IFull = [self.parent.Filt2Full[iFilt]-1 for iFilt in self.ISel] - IFull = [iFull for iFull in IFull if iFull>=0] - if main.tabList.haveSameColumns(ITab): - for iTab,sTab in zip(ITab,STab): - main.tabList.get(iTab).deleteColumns(IFull) - else: - self.parent.tab.deleteColumns(IFull) - self.parent.setColumns() - self.parent.setGUIColumns(xSel=iX) - main.redraw() - - def OnAddColumn(self, event): - main=self.parent.mainframe - self.showFormulaDialog('Add a new column') - - def showFormulaDialog(self, title, name='', formula=''): - bValid=False - bCancelled=False - main=self.parent.mainframe - sName=name - sFormula=formula - - if self.parent.bShowID: - columns=[no_unit(self.parent.lbColumns.GetString(i)[4:]) for i in self.ISel] - else: - columns=[no_unit(self.parent.lbColumns.GetString(i)) for i in self.ISel] - if len(self.ISel)>0: - main_unit=unit(self.parent.lbColumns.GetString(self.ISel[-1])) - else: - main_unit='' - xcol = self.parent.comboX.GetStringSelection() - xunit = unit(xcol) - xcol = no_unit(xcol) - - while (not bValid) and (not bCancelled): - dlg = FormulaDialog(title=title,columns=columns,xcol=xcol,xunit=xunit,unit=main_unit,name=sName,formula=sFormula) - dlg.CentreOnParent() - if dlg.ShowModal()==wx.ID_OK: - sName = dlg.name.GetValue() - sFormula = dlg.formula.GetValue() - dlg.Destroy() - if len(self.ISel)>0: - iFilt=self.ISel[-1] - iFull=self.parent.Filt2Full[iFilt] - else: - iFull = -1 - - ITab,STab=main.selPanel.getSelectedTables() - #if main.tabList.haveSameColumns(ITab): - sError='' - nError=0 - haveSameColumns=main.tabList.haveSameColumns(ITab) - for iTab,sTab in zip(ITab,STab): - if haveSameColumns or self.parent.tab.active_name == sTab: - # apply formula to all tables with same columns, otherwise only to active table - if title.startswith('Edit'): - bValid=main.tabList.get(iTab).setColumnByFormula(sName,sFormula,iFull) - else: - bValid=main.tabList.get(iTab).addColumnByFormula(sName,sFormula,iFull) - if not bValid: - sError+='The formula didn''t eval for table {}\n'.format(sTab) - nError+=1 - if len(sError)>0: - Error(self.parent,sError) - if nErrorlen(self.columns): - print('',self.Filt2Full) - print('',self.columns) - raise Exception('Error in Filt2Full') - return self.columns[self.Filt2Full] - - def setReadOnly(self, tabLabel='', cols=[]): - """ Set this list of columns as readonly and non selectable """ - self.tab=None - self.bReadOnly=True - self.lb.SetLabel(tabLabel) - self.setColumns(columnNames=cols) - self.setGUIColumns() - self.lbColumns.Enable(True) - self.comboX.Enable(False) - self.lbColumns.SetSelection(-1) - self.bt.Enable(False) - self.bShowID=False - self.btClear.Enable(False) - self.btFilter.Enable(False) - self.tFilter.Enable(False) - self.tFilter.SetValue('') - - def setTab(self,tab=None,xSel=-1,ySel=[],colNames=None, tabLabel=''): - """ Set the table used for the columns, update the GUI """ - self.tab=tab; - self.lbColumns.Enable(True) - self.comboX.Enable(True) - self.bReadOnly=False - self.btClear.Enable(True) - self.btFilter.Enable(True) - self.tFilter.Enable(True) - self.bt.Enable(True) - if tab is not None: - self.Filt2Full=None # TODO - if tab.active_name!='default': - self.lb.SetLabel(' '+tab.active_name) - self.setColumns() - self.setGUIColumns(xSel=xSel, ySel=ySel) - else: - self.Filt2Full=None # TODO Decide whether filter should be applied... 
- self.lb.SetLabel(tabLabel) - self.setColumns(columnNames=colNames) - self.setGUIColumns(xSel=xSel, ySel=ySel) - - def updateColumn(self,i,newName): - """ Update of one column name - i: index in GUI - """ - iFull = self.Filt2Full[i] - if self.bShowID: - newName='{:03d} '.format(iFull)+newName - self.lbColumns.SetString(i,newName) - self.comboX.SetString (i,newName) - self.columns[iFull] = newName - - def Full2Filt(self,iFull): - try: - return self.Filt2Full.index(iFull) - except: - return -1 - - def setColumns(self, columnNames=None): - # Get columns from user inputs, or table, or stored. - if columnNames is not None: - # Populating based on user inputs.. - columns=columnNames - elif self.tab is None: - columns=self.columns - else: - # Populating based on table (safest if table was updated) - columns=['Index']+self.tab.columns - # Storing columns, considered as "Full" - self.columns=np.array(columns) - - def setGUIColumns(self, xSel=-1, ySel=[]): - """ Set GUI columns based on self.columns and potential filter """ - # Filtering columns if neeed - sFilt = self.tFilter.GetLineText(0).strip() - if len(sFilt)>0: - Lf, If = filter_list(self.columns, sFilt) - self.Filt2Full = If - else: - self.Filt2Full = list(np.arange(len(self.columns))) - columns=self.columns[self.Filt2Full] - - # GUI update - self.Freeze() - if self.bShowID: - columnsY= ['{:03d} '.format(i)+c for i,c in enumerate(columns)] - columnsX= ['{:03d} '.format(i)+c for i,c in enumerate(self.columns)] - else: - columnsY= columns - columnsX= self.columns - self.lbColumns.Set(columnsY) # potentially filterd - # Slow line for many columns - # NOTE: limiting to 300 for now.. I'm not sure anywant would want to scroll more than that - # Consider adding a "more button" - # see e.g. https://comp.soft-sys.wxwindows.narkive.com/gDfA1Ds5/long-load-time-in-wxpython - self.comboX.Set(columnsX[:300]) # non filtered - - # Set selection for y, if any, and considering filtering - for iFull in ySel: - if iFull=0: - iFilt = self.Full2Filt(iFull) - if iFilt>0: - self.lbColumns.SetSelection(iFilt) - self.lbColumns.EnsureVisible(iFilt) - if len(self.lbColumns.GetSelections())<=0: - self.lbColumns.SetSelection(self.getDefaultColumnY(self.tab,len(columnsY)-1)) - - # Set selection for x, if any, NOTE x is not filtered! 
- if (xSel<0) or xSel>len(columnsX): - self.comboX.SetSelection(self.getDefaultColumnX(self.tab,len(columnsX)-1)) - else: - self.comboX.SetSelection(xSel) - self.Thaw() - - def forceOneSelection(self): - ISel=self.lbColumns.GetSelections() - self.lbColumns.SetSelection(-1) - if len(ISel)>0: - self.lbColumns.SetSelection(ISel[0]) - - def forceZeroSelection(self): - self.lbColumns.SetSelection(-1) - - def empty(self): - self.lbColumns.Clear() - self.comboX.Clear() - self.lb.SetLabel('') - self.bReadOnly=False - self.lbColumns.Enable(False) - self.comboX.Enable(False) - self.bt.Enable(False) - self.tab=None - self.columns=[] - self.Filt2Full=None - self.btClear.Enable(False) - self.btFilter.Enable(False) - self.tFilter.Enable(False) - self.tFilter.SetValue('') - - def getColumnSelection(self): - iX = self.comboX.GetSelection() - if self.bShowID: - sX = self.comboX.GetStringSelection()[4:] - else: - sX = self.comboX.GetStringSelection() - IY = self.lbColumns.GetSelections() - if self.bShowID: - SY = [self.lbColumns.GetString(i)[4:] for i in IY] - else: - SY = [self.lbColumns.GetString(i) for i in IY] - iXFull = iX # NOTE: x is always in full - IYFull = [self.Filt2Full[iY] for iY in IY] - return iXFull,IYFull,sX,SY - - def onClearFilter(self, event=None): - self.tFilter.SetValue('') - self.onFilterChange() - - def onFilterChange(self, event=None): - xSel,ySel,_,_ = self.getColumnSelection() # (indices in full) - self.setGUIColumns(xSel=xSel, ySel=ySel) # <<< Filtering done here - self.triggerPlot() # Trigger a col selection event - - def onFilterKey(self, event=None): - s=GetKeyString(event) - if s=='ESCAPE' or s=='Ctrl+C': - self.onClearFilter() - event.Skip() - - def triggerPlot(self): - event=wx.PyCommandEvent(wx.EVT_LISTBOX.typeId, self.lbColumns.GetId()) - wx.PostEvent(self.GetEventHandler(), event) - - -# --------------------------------------------------------------------------------} -# --- Selection Panel -# --------------------------------------------------------------------------------{ -class SelectionPanel(wx.Panel): - """ Display options for the user to select data """ - def __init__(self, parent, tabList, mode='auto',mainframe=None): - # Superclass constructor - super(SelectionPanel,self).__init__(parent) - # DATA - self.mainframe = mainframe - self.tabList = None - self.itabForCol = None - self.parent = parent - self.tabSelections = {} - self.tabSelected = [] # NOTE only used to remember a selection after a reload - self.modeRequested = mode - self.currentMode = None - self.nSplits = -1 - - # GUI DATA - self.splitter = MultiSplit(self, style=wx.SP_LIVE_UPDATE) - self.splitter.SetMinimumPaneSize(70) - self.tabPanel = TablePanel (self.splitter,mainframe, tabList) - self.colPanel1 = ColumnPanel(self.splitter, self, mainframe); - self.colPanel2 = ColumnPanel(self.splitter, self, mainframe); - self.colPanel3 = ColumnPanel(self.splitter, self, mainframe); - self.tabPanel.Hide() - self.colPanel1.Hide() - self.colPanel2.Hide() - self.colPanel3.Hide() - - # Layout - self.updateLayout() - VertSizer = wx.BoxSizer(wx.VERTICAL) - VertSizer.Add(self.splitter, 2, flag=wx.EXPAND, border=0) - self.SetSizer(VertSizer) - - # TRIGGERS - self.setTables(tabList) - - def updateLayout(self,mode=None): - self.Freeze() - if mode is None: - mode=self.modeRequested - else: - self.modeRequested = mode - if mode=='auto': - self.autoMode() - elif mode=='sameColumnsMode': - self.sameColumnsMode() - elif mode=='simColumnsMode': - self.simColumnsMode() - elif mode=='twoColumnsMode': - self.twoColumnsMode() - 
elif mode=='threeColumnsMode': - self.threeColumnsMode() - else: - self.Thaw() - raise Exception('Wrong mode for selection layout: {}'.format(mode)) - self.Thaw() - - - def autoMode(self): - ISel=self.tabPanel.lbTab.GetSelections() - if self.tabList is not None: - if self.tabList.len()<=0: - self.nSplits=-1 - self.splitter.removeAll() - elif self.tabList.haveSameColumns(): - self.sameColumnsMode() - elif self.tabList.haveSameColumns(ISel): - # We don't do same column because we know at least one table is different - # to avoid "jumping" too much - self.twoColumnsMode() - else: - # See if tables are quite similar - IKeepPerTab, IMissPerTab, IDuplPerTab= getTabCommonColIndices([self.tabList.get(i) for i in ISel]) - if np.all(np.array([len(I) for I in IMissPerTab])<30) and np.all(np.array([len(I) for I in IKeepPerTab])>=2): - self.simColumnsMode() - elif len(ISel)==2: - self.twoColumnsMode() - elif len(ISel)==3: - self.threeColumnsMode() - else: - #self.simColumnsMode(self) - raise Exception('Too many panels selected with significant columns differences.') - - def sameColumnsMode(self): - self.currentMode = 'sameColumnsMode' - if self.nSplits==1: - return - if self.nSplits==0 and self.tabList.len()<=1: - return - self.splitter.removeAll() - if self.tabList is not None: - if self.tabList.len()>1: - self.splitter.AppendWindow(self.tabPanel) - self.splitter.AppendWindow(self.colPanel1) - if self.mainframe is not None: - self.mainframe.mainFrameUpdateLayout() - if self.tabList is not None: - if self.tabList.len()<=1: - self.nSplits=0 - else: - self.nSplits=1 - else: - self.nSplits=0 - - def simColumnsMode(self): - self.currentMode = 'simColumnsMode' - self.splitter.removeAll() - self.splitter.AppendWindow(self.tabPanel) - self.splitter.AppendWindow(self.colPanel2) - self.splitter.AppendWindow(self.colPanel1) - self.splitter.setEquiSash() - if self.nSplits<2 and self.mainframe is not None: - self.mainframe.mainFrameUpdateLayout() - self.nSplits=2 - - def twoColumnsMode(self): - self.currentMode = 'twoColumnsMode' - if self.nSplits==2: - return - self.splitter.removeAll() - self.splitter.AppendWindow(self.tabPanel) - self.splitter.AppendWindow(self.colPanel2) - self.splitter.AppendWindow(self.colPanel1) - self.splitter.setEquiSash() - if self.nSplits<2 and self.mainframe is not None: - self.mainframe.mainFrameUpdateLayout() - self.nSplits=2 - - def threeColumnsMode(self): - self.currentMode = 'threeColumnsMode' - if self.nSplits==3: - return - self.splitter.removeAll() - self.splitter.AppendWindow(self.tabPanel) - self.splitter.AppendWindow(self.colPanel3) - self.splitter.AppendWindow(self.colPanel2) - self.splitter.AppendWindow(self.colPanel1) - self.splitter.setEquiSash() - if self.mainframe is not None: - self.mainframe.mainFrameUpdateLayout() - self.nSplits=3 - - def setTables(self,tabList,update=False): - """ Set the list of tables. 
Keeping the selection if it's an update """ - # TODO PUT ME IN TABLE PANEL - # Find a better way to remember selection - #print('UPDATING TABLES') - # Emptying GUI - TODO only if needed - self.colPanel1.empty() - self.colPanel2.empty() - self.colPanel3.empty() - # Adding - self.tabList = tabList - self.tabPanel.tabList = self.tabList - tabnames = self.tabList.tabNames - self.tabPanel.updateTabNames() - for tn in tabnames: - if tn not in self.tabSelections.keys(): - self.tabSelections[tn]={'xSel':-1,'ySel':[]} - else: - pass # do nothing - - # Reselecting - if len(self.tabSelected)>0: - # Removed line below since two column mode implemented - #if not haveSameColumns(tabs,ISel): - # ISel=[ISel[0]] - for i in self.tabSelected: - if i0: - # Trigger - updating columns and layout - ISel=self.tabPanel.lbTab.GetSelections() - self.tabSelected=ISel - if self.currentMode=='simColumnsMode': - self.setColForSimTab(ISel) - else: - if len(ISel)==1: - self.setTabForCol(ISel[0],1) - elif len(ISel)==2: - self.setTabForCol(ISel[0],1) - self.setTabForCol(ISel[1],2) - elif len(ISel)==3: - self.setTabForCol(ISel[0],1) - self.setTabForCol(ISel[1],2) - self.setTabForCol(ISel[2],3) - else: # Likely all tables have the same columns - self.setTabForCol(ISel[0],1) - self.updateLayout(self.modeRequested) - - def setTabForCol(self,iTabSel,iPanel): - t = self.tabList.get(iTabSel) - ts = self.tabSelections[t.name] - if iPanel==1: - self.colPanel1.setTab(t,ts['xSel'],ts['ySel']) - elif iPanel==2: - self.colPanel2.setTab(t,ts['xSel'],ts['ySel']) - elif iPanel==3: - self.colPanel3.setTab(t,ts['xSel'],ts['ySel']) - else: - raise Exception('Wrong ipanel') - - def setColForSimTab(self,ISel): - """ Set column panels for similar tables """ - tabs = [self.tabList.get(i) for i in ISel] - IKeepPerTab, IMissPerTab, IDuplPerTab = getTabCommonColIndices(tabs) - LenMiss = np.array([len(I) for I in IMissPerTab]) - LenKeep = np.array([len(I) for I in IKeepPerTab]) - LenDupl = np.array([len(I) for I in IDuplPerTab]) - - ColInfo = ['Sim. 
table mode '] - ColInfo += [''] - if self.tabList.haveSameColumns(ISel): - if len(ISel)>1: - ColInfo += ['Columns identical',''] - else: - if (np.all(np.array(LenMiss)==0)): - ColInfo += ['Columns identical'] - ColInfo += ['Order different!'] - - ColInfo += ['','First difference:'] - ColInfo.append('----------------------------------') - bFirst=True - for it,t in enumerate(tabs): - print('IKeep',IKeepPerTab[it]) - if it==0: - continue - INotOrdered=[ii for i,ii in enumerate(IKeepPerTab[it]) if ii!=IKeepPerTab[0][i]] - print('INot',INotOrdered) - if len(INotOrdered)>0: - im=INotOrdered[0] - if bFirst: - ColInfo.append('{}:'.format(tabs[0].active_name)) - ColInfo.append('{:03d} {:s}'.format(im, tabs[0].columns[im])) - bFirst=False - ColInfo.append('{}:'.format(t.active_name)) - ColInfo.append('{:03d} {:s}'.format(im, t.columns[im])) - ColInfo.append('----------------------------------') - - else: - ColInfo += ['Columns different!'] - ColInfo += ['(similar: {})'.format(LenKeep[0])] - ColInfo += ['','Missing columns:'] - ColInfo.append('----------------------------------') - for it,t in enumerate(tabs): - ColInfo.append('{}:'.format(t.active_name)) - if len(IMissPerTab[it])==0: - ColInfo.append(' (None) ') - for im in IMissPerTab[it]: - ColInfo.append('{:03d} {:s}'.format(im, t.columns[im])) - ColInfo.append('----------------------------------') - - if (np.any(np.array(LenDupl)>0)): - if len(ISel)>1: - ColInfo += ['','Common duplicates:'] - else: - ColInfo += ['','Duplicates:'] - ColInfo.append('----------------------------------') - for it,t in enumerate(tabs): - ColInfo.append('{}:'.format(t.active_name)) - if len(IDuplPerTab[it])==0: - ColInfo.append(' (None) ') - for im in IDuplPerTab[it]: - ColInfo.append('{:03d} {:s}'.format(im, t.columns[im])) - ColInfo.append('----------------------------------') - - - colNames = ['Index'] + [tabs[0].columns[i] for i in IKeepPerTab[0]] - self.colPanel1.setTab(tab=None, colNames=colNames, tabLabel=' Tab. Intersection') - self.colPanel2.setReadOnly(' Tab. Difference', ColInfo) - self.IKeepPerTab=IKeepPerTab - - - - def selectDefaultTable(self): - # Selecting the first table - if self.tabPanel.lbTab.GetCount()>0: - self.tabPanel.lbTab.SetSelection(0) - self.tabSelected=[0] - else: - self.tabSelected=[] - - def tabSelectionChanged(self): - # TODO This can be cleaned-up and merged with updateLayout - #print('Tab selection change') - # Storing the previous selection - #self.printSelection() - self.saveSelection() # - #self.printSelection() - ISel=self.tabPanel.lbTab.GetSelections() - if len(ISel)>0: - if self.modeRequested=='auto': - self.autoMode() - if self.currentMode=='simColumnsMode':# and len(ISel)>1: - self.setColForSimTab(ISel) - self.tabSelected=self.tabPanel.lbTab.GetSelections() - return - - if self.tabList.haveSameColumns(ISel): - # Setting tab - self.setTabForCol(ISel[0],1) - self.colPanel2.empty() - self.colPanel3.empty() - else: - if self.nSplits==2: - if len(ISel)>2: - Error(self,'In this mode, only two tables can be selected. To compare three tables, uses the "3 different tables" mode. Otherwise the tables need to have the same columns.') - ISel=ISel[0:2] - self.tabPanel.lbTab.SetSelection(wx.NOT_FOUND) - for isel in ISel: - self.tabPanel.lbTab.SetSelection(isel) - self.colPanel3.empty() - elif self.nSplits==3: - if len(ISel)>3: - Error(self,'In this mode, only three tables can be selected. 
To compare more than three tables, the tables need to have the same columns.') - ISel=ISel[0:3] - self.tabPanel.lbTab.SetSelection(wx.NOT_FOUND) - for isel in ISel: - self.tabPanel.lbTab.SetSelection(isel) - else: - Error(self,'The tables selected have different columns.\n\nThis is not compatible with the "Same tables" mode. To compare them, chose one of the following mode: "2 tables", "3 tables" or "Sim. tables".') - self.colPanel2.empty() - self.colPanel3.empty() - # unselect all and select only the first one - ISel=[ISel[0]] - self.tabPanel.lbTab.SetSelection(wx.NOT_FOUND) - self.tabPanel.lbTab.SetSelection(ISel[0]) - for iPanel,iTab in enumerate(ISel): - self.setTabForCol(iTab,iPanel+1) - #print('>>>Updating tabSelected, from',self.tabSelected,'to',self.tabPanel.lbTab.GetSelections()) - self.tabSelected=self.tabPanel.lbTab.GetSelections() - - def colSelectionChanged(self): - """ Simple triggers when column selection is changed, NOTE: does not redraw """ - if self.currentMode=='simColumnsMode': - self.colPanel2.forceZeroSelection() - else: - if self.nSplits in [2,3]: - ISel=self.tabPanel.lbTab.GetSelections() - if self.tabList.haveSameColumns(ISel): - pass # TODO: this test is identical to onTabSelectionChange. Unification. - # elif len(ISel)==2: - # self.colPanel1.forceOneSelection() - # self.colPanel2.forceOneSelection() - # elif len(ISel)==3: - # self.colPanel1.forceOneSelection() - # self.colPanel2.forceOneSelection() - # self.colPanel3.forceOneSelection() - - def update_tabs(self, tabList): - self.setTables(tabList, update=True) - - def renameTable(self,iTab, oldName, newName): - #self.printSelection() - self.tabSelections[newName] = self.tabSelections.pop(oldName) - self.tabPanel.updateTabNames() - #self.printSelection() - - def saveSelection(self): - #self.ISel=self.tabPanel.lbTab.GetSelections() - ISel=self.tabSelected # - if self.tabList.haveSameColumns(ISel): - for ii in ISel: - t=self.tabList.get(ii) - self.tabSelections[t.name]['xSel'] = self.colPanel1.comboX.GetSelection() - self.tabSelections[t.name]['ySel'] = self.colPanel1.lbColumns.GetSelections() - else: - if len(ISel)>=1: - t=self.tabList.get(ISel[0]) - self.tabSelections[t.name]['xSel'] = self.colPanel1.comboX.GetSelection() - self.tabSelections[t.name]['ySel'] = self.colPanel1.lbColumns.GetSelections() - if len(ISel)>=2: - t=self.tabList.get(ISel[1]) - self.tabSelections[t.name]['xSel'] = self.colPanel2.comboX.GetSelection() - self.tabSelections[t.name]['ySel'] = self.colPanel2.lbColumns.GetSelections() - if len(ISel)>=3: - t=self.tabList.get(ISel[2]) - self.tabSelections[t.name]['xSel'] = self.colPanel3.comboX.GetSelection() - self.tabSelections[t.name]['ySel'] = self.colPanel3.lbColumns.GetSelections() - self.tabSelected = self.tabPanel.lbTab.GetSelections(); - - def printSelection(self): - print('Number of tabSelections stored:',len(self.tabSelections)) - TS=self.tabSelections - for i,tn in enumerate(self.tabList.tabNames): - if tn not in TS.keys(): - print('Tab',i,'>>> Name {} not found in selection'.format(tn)) - else: - print('Tab',i,'xSel:',TS[tn]['xSel'],'ySel:',TS[tn]['ySel'],'Name:',tn) - - def getPlotDataSelection(self): - ID = [] - SameCol=False - if self.tabList is not None and self.tabList.len()>0: - ITab,STab = self.getSelectedTables() - if self.currentMode=='simColumnsMode' and len(ITab)>1: - iiX1,IY1,ssX1,SY1 = self.colPanel1.getColumnSelection() - SameCol=False - for i,(itab,stab) in enumerate(zip(ITab,STab)): - IKeep=self.IKeepPerTab[i] - for j,(iiy,ssy) in enumerate(zip(IY1,SY1)): - if iiy==0: - 
iy = 0 - sy = ssy - else: - iy = IKeep[iiy-1]+1 - sy = self.tabList.get(itab).columns[IKeep[iiy-1]] - if iiX1==0: - iX1 = 0 - sX1 = ssX1 - else: - iX1 = IKeep[iiX1-1]+1 - sX1 = self.tabList.get(itab).columns[IKeep[iiX1-1]] - ID.append([itab,iX1,iy,sX1,sy,stab]) - else: - iX1,IY1,sX1,SY1 = self.colPanel1.getColumnSelection() - SameCol=self.tabList.haveSameColumns(ITab) - if self.nSplits in [0,1] or SameCol: - for i,(itab,stab) in enumerate(zip(ITab,STab)): - for j,(iy,sy) in enumerate(zip(IY1,SY1)): - ID.append([itab,iX1,iy,sX1,sy,stab]) - elif self.nSplits in [2,3]: - if len(ITab)>=1: - for j,(iy,sy) in enumerate(zip(IY1,SY1)): - ID.append([ITab[0],iX1,iy,sX1,sy,STab[0]]) - if len(ITab)>=2: - iX2,IY2,sX2,SY2 = self.colPanel2.getColumnSelection() - for j,(iy,sy) in enumerate(zip(IY2,SY2)): - ID.append([ITab[1],iX2,iy,sX2,sy,STab[1]]) - if len(ITab)>=3: - iX2,IY2,sX2,SY2 = self.colPanel3.getColumnSelection() - for j,(iy,sy) in enumerate(zip(IY2,SY2)): - ID.append([ITab[2],iX2,iy,sX2,sy,STab[2]]) - else: - raise Exception('Wrong number of splits {}'.format(self.nSplits)) - return ID,SameCol,self.currentMode - - def getSelectedTables(self): - I=self.tabPanel.lbTab.GetSelections() - S=[self.tabPanel.lbTab.GetString(i) for i in I] - return I,S - - def getAllTables(self): - I=range(self.tabPanel.lbTab.GetCount()) - S=[self.tabPanel.lbTab.GetString(i) for i in I] - return I,S - - def clean_memory(self): - self.colPanel1.empty() - self.colPanel2.empty() - self.colPanel3.empty() - self.tabPanel.empty() - del self.tabList - self.tabList=None - - @property - def xCol(self): - iX, _, sX, _ = self.colPanel1.getColumnSelection() - return iX,sX - - -if __name__ == '__main__': - import pandas as pd; - from Tables import Table - import numpy as np - - def OnTabPopup(event): - self.PopupMenu(TablePopup(self,selPanel.tabPanel.lbTab), event.GetPosition()) - - app = wx.App(False) - self=wx.Frame(None,-1,"Title") - tab=Table(data=pd.DataFrame(data={'ColA': np.random.normal(0,1,100)+1,'ColB':np.random.normal(0,1,100)+2})) - selPanel=SelectionPanel(self,[tab],mode='twoColumnsMode') - self.SetSize((800, 600)) - self.Center() - self.Show() - selPanel.tabPanel.lbTab.Bind(wx.EVT_RIGHT_DOWN, OnTabPopup) - - - app.MainLoop() - +import wx +import platform +try: + from .common import * + from .GUICommon import * + from .GUIMultiSplit import MultiSplit + from .GUIToolBox import GetKeyString +except: + raise +# from common import * +# from GUICommon import * +# from GUIMultiSplit import MultiSplit + + +__all__ = ['ColumnPanel', 'TablePanel', 'SelectionPanel','SEL_MODES','SEL_MODES_ID','TablePopup','ColumnPopup'] + +SEL_MODES = ['auto','Same tables' ,'Sim. 
tables' ,'2 tables','3 tables (exp.)' ] +SEL_MODES_ID = ['auto','sameColumnsMode','simColumnsMode','twoColumnsMode' ,'threeColumnsMode' ] + +def ireplace(text, old, new): + """ Replace case insensitive """ + try: + index_l = text.lower().index(old.lower()) + return text[:index_l] + new + text[index_l + len(old):] + except: + return text + + +# --------------------------------------------------------------------------------} +# --- Formula diagog +# --------------------------------------------------------------------------------{ +class FormulaDialog(wx.Dialog): + def __init__(self, title='', name='', formula='',columns=[],unit='',xcol='',xunit=''): + wx.Dialog.__init__(self, None, title=title) + # --- Data + self.unit=unit.strip().replace(' ','') + self.columns=['{'+c+'}' for c in columns] + self.xcol='{'+xcol+'}' + self.xunit=xunit.strip().replace(' ','') + if len(formula)==0: + formula=' + '.join(self.columns) + if len(name)==0: + name=self.getDefaultName() + self.formula_in=formula + + + quick_lbl = wx.StaticText(self, label="Predefined: " ) + self.cbQuick = wx.ComboBox(self, choices=['None','x 1000','/ 1000','deg2rad','rad2deg','rpm2radps','radps2rpm','norm','squared','d/dx'], style=wx.CB_READONLY) + self.cbQuick.SetSelection(0) + self.cbQuick.Bind(wx.EVT_COMBOBOX ,self.onQuickFormula) + + # Formula info + formula_lbl = wx.StaticText(self, label="Formula: ") + self.formula = wx.TextCtrl(self) + #self.formula.SetFont(getMonoFont(self)) + + self.formula.SetValue(formula) + formula_sizer = wx.BoxSizer(wx.HORIZONTAL) + formula_sizer.Add(formula_lbl ,0,wx.ALL|wx.RIGHT|wx.CENTER,5) + formula_sizer.Add(self.formula,1,wx.ALL|wx.EXPAND|wx.CENTER,5) + formula_sizer.Add(quick_lbl ,0,wx.ALL|wx.CENTER,5) + formula_sizer.Add(self.cbQuick,0,wx.ALL|wx.CENTER,5) + + + # name info + name_lbl = wx.StaticText(self, label="New name: " ) + self.name = wx.TextCtrl(self, size=wx.Size(200,-1)) + self.name.SetValue(name) + #self.name.SetFont(getMonoFont(self)) + name_sizer = wx.BoxSizer(wx.HORIZONTAL) + name_sizer.Add(name_lbl ,0,wx.ALL|wx.RIGHT|wx.CENTER,5) + name_sizer.Add(self.name,0,wx.ALL|wx.CENTER,5) + + info ='The formula needs to have a valid python syntax for an array manipulation. The available arrays are \n' + info+='the columns of the current table. 
The column names (without units) are surrounded by curly brackets.\n' + info+='You have access to numpy using `np`.\n\n' + info+='For instance, if you have two columns called `ColA [m]` and `ColB [m]` you can use:\n' + info+=' - ` {ColA} + {ColB} `\n' + info+=' - ` np.sqrt( {ColA}**2/1000 + 1/{ColB}**2 ) `\n' + info+=' - ` np.sin ( {ColA}*2*np.pi + {ColB} ) `\n' + help_lbl = wx.StaticText(self, label='Help: ') + info_lbl = wx.StaticText(self, label=info) + help_sizer = wx.BoxSizer(wx.HORIZONTAL) + help_sizer.Add(help_lbl ,0,wx.ALL|wx.RIGHT|wx.TOP,5) + help_sizer.Add(info_lbl ,0,wx.ALL|wx.TOP,5) + + + + self.btOK = wx.Button(self, wx.ID_OK)#, label = "OK" ) + btCL = wx.Button(self,label = "Cancel") + bt_sizer = wx.BoxSizer(wx.HORIZONTAL) + bt_sizer.Add(self.btOK, 0 ,wx.ALL,5) + bt_sizer.Add(btCL, 0 ,wx.ALL,5) + #btOK.Bind(wx.EVT_BUTTON,self.onOK ) + btCL.Bind(wx.EVT_BUTTON,self.onCancel) + + + main_sizer = wx.BoxSizer(wx.VERTICAL) + #main_sizer.Add(quick_sizer ,0,wx.ALL|wx.EXPAND,5) + main_sizer.Add(formula_sizer,0,wx.ALL|wx.EXPAND,5) + main_sizer.Add(name_sizer ,0,wx.ALL|wx.EXPAND,5) + main_sizer.Add(help_sizer ,0 ,wx.ALL|wx.CENTER, 5) + main_sizer.Add(bt_sizer ,0, wx.ALL|wx.CENTER, 5) + self.SetSizer(main_sizer) + self.Fit() + + def stripBrackets(self,s): + return s.replace('{','').replace('}','') + + def getOneColName(self): + if len(self.columns)>0: + return self.columns[-1] + else: + return '' + + def get_unit(self): + if len(self.unit)>0: + return ' ['+self.unit+']' + else: + return '' + def get_squared_unit(self): + if len(self.unit)>0: + if self.unit[0].lower()=='-': + return ' [-]' + else: + return ' [('+self.unit+')^2]' + else: + return '' + def get_kilo_unit(self): + if len(self.unit)>0: + if len(self.unit)>=1: + if self.unit[0].lower()=='-': + return ' [-]' + elif self.unit[0].lower()=='G': + r='T' + elif self.unit[0].lower()=='M': + r='G' + elif self.unit[0]=='k': + r='M' + elif self.unit[0]=='m': + if len(self.unit)==1: + r='km' + elif self.unit[1]=='/': + r='km' + else: + r='' + else: + r='k'+self.unit[0] + return ' ['+r+self.unit[1:]+']' + else: + return ' [k'+self.unit+']' + else: + return '' + def get_milli_unit(self): + if len(self.unit)>=1: + if self.unit[0].lower()=='-': + return ' [-]' + elif self.unit[0].lower()=='T': + r='G' + elif self.unit[0]=='G': + r='M' + elif self.unit[0]=='M': + r='k' + elif self.unit[0].lower()=='k': + r='' + elif self.unit[0]=='m': + if len(self.unit)==1: + r='mm' + elif self.unit[1]=='/': + r='mm' + else: + r='mu' + else: + r='m'+self.unit[0] + + return ' ['+r+self.unit[1:]+']' + else: + return '' + def get_deriv_unit(self): + if self.unit==self.xunit: + return ' [-]' + else: + return ' ['+self.unit+'/'+self.xunit+']' + + def getDefaultName(self): + if len(self.columns)>0: + return self.stripBrackets(self.getOneColName())+' New'+self.get_unit() + else: + return '' + + def onQuickFormula(self, event): + i = self.cbQuick.GetSelection() + s = self.cbQuick.GetStringSelection() + if s=='None': + self.formula.SetValue(self.formula_in) + return + + #self.formula_in=self.formula.GetValue() + c1 = self.getOneColName() + n1 = self.stripBrackets(c1) + + if s=='x 1000': + self.formula.SetValue(c1+' * 1000') + self.name.SetValue(n1+'_x1000'+ self.get_milli_unit()) + elif s=='/ 1000': + self.formula.SetValue(c1+' / 1000') + self.name.SetValue(n1+'_/1000'+self.get_kilo_unit()) + elif s=='deg2rad': + self.formula.SetValue(c1+' *np.pi/180') + self.name.SetValue(n1+'_rad [rad]') + elif s=='rad2deg': + self.formula.SetValue(c1+' *180/np.pi') + 
self.name.SetValue(n1+'_deg [deg]') + elif s=='rpm2radps': + self.formula.SetValue(c1+' *2*np.pi/60') + self.name.SetValue(n1+'_radps [rad/s]') + elif s=='radps2rpm': + self.formula.SetValue(c1+' *60/(2*np.pi)') + self.name.SetValue(n1+'_rpm [rpm]') + elif s=='norm': + self.formula.SetValue('np.sqrt( '+'**2 + '.join(self.columns)+'**2 )') + self.name.SetValue(n1+'_norm'+self.get_unit()) + elif s=='squared': + self.formula.SetValue('**2 + '.join(self.columns)+'**2 ') + self.name.SetValue(n1+'^2'+self.get_squared_unit()) + elif s=='d/dx': + self.formula.SetValue('np.gradient( '+'+'.join(self.columns)+ ', '+self.xcol+' )') + nx = self.stripBrackets(self.xcol) + bDoNewName=True + if self.xunit=='s': + if n1.lower().find('speed')>=0: + n1=ireplace(n1,'speed','Acceleration') + bDoNewName=False + elif n1.lower().find('velocity')>=0: + n1=ireplace(n1,'velocity','Acceleration') + bDoNewName=False + elif n1.lower().find('vel')>=0: + n1=ireplace(n1,'vel','Acc') + bDoNewName=False + elif n1.lower().find('position')>=0: + n1=ireplace(n1,'position','speed') + bDoNewName=False + elif n1.lower().find('pos')>=0: + n1=ireplace(n1,'pos','Vel') + bDoNewName=False + else: + n1='d('+n1+')/dt' + else: + n1='d('+n1+')/d('+nx+')' + self.name.SetValue(n1+self.get_deriv_unit()) + else: + raise Exception('Unknown quick formula {}'.s) + + def onCancel(self, event): + self.Destroy() +# --------------------------------------------------------------------------------} +# --- Popup menus +# --------------------------------------------------------------------------------{ +class TablePopup(wx.Menu): + def __init__(self, mainframe, parent, fullmenu=False): + wx.Menu.__init__(self) + self.parent = parent # parent is listbox + self.mainframe = mainframe + self.ISel = self.parent.GetSelections() + + if fullmenu: + self.itNameFile = wx.MenuItem(self, -1, "Naming: by file names", kind=wx.ITEM_CHECK) + self.MyAppend(self.itNameFile) + self.Bind(wx.EVT_MENU, self.OnNaming, self.itNameFile) + self.Check(self.itNameFile.GetId(), self.parent.GetParent().tabList.Naming=='FileNames') # Checking the menu box + + item = wx.MenuItem(self, -1, "Sort by name") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.OnSort, item) + + item = wx.MenuItem(self, -1, "Add") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.mainframe.onAdd, item) + + if len(self.ISel)>0: + item = wx.MenuItem(self, -1, "Delete") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.OnDeleteTabs, item) + + if len(self.ISel)==1: + tabPanel=self.parent.GetParent() + if tabPanel.tabList.Naming!='FileNames': + item = wx.MenuItem(self, -1, "Rename") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.OnRenameTab, item) + + if len(self.ISel)==1: + item = wx.MenuItem(self, -1, "Export") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.OnExportTab, item) + + def MyAppend(self, item): + try: + self.Append(item) # python3 + except: + self.AppendItem(item) # python2 + + def OnNaming(self, event=None): + tabPanel=self.parent.GetParent() + if self.itNameFile.IsChecked(): + tabPanel.tabList.setNaming('FileNames') + else: + tabPanel.tabList.setNaming('Ellude') + + tabPanel.updateTabNames() + + def OnDeleteTabs(self, event): + self.mainframe.deleteTabs(self.ISel) + + def OnRenameTab(self, event): + oldName = self.parent.GetString(self.ISel[0]) + dlg = wx.TextEntryDialog(self.parent, 'New table name:', 'Rename table',oldName,wx.OK|wx.CANCEL) + dlg.CentreOnParent() + if dlg.ShowModal() == wx.ID_OK: + newName=dlg.GetValue() + self.mainframe.renameTable(self.ISel[0],newName) + + def 
OnExportTab(self, event): + self.mainframe.exportTab(self.ISel[0]); + + def OnSort(self, event): + self.mainframe.sortTabs() + +class ColumnPopup(wx.Menu): + def __init__(self, parent, fullmenu=False): + wx.Menu.__init__(self) + self.parent = parent + self.ISel = self.parent.lbColumns.GetSelections() + + self.itShowID = wx.MenuItem(self, -1, "Show ID", kind=wx.ITEM_CHECK) + self.MyAppend(self.itShowID) + self.Bind(wx.EVT_MENU, self.OnShowID, self.itShowID) + self.Check(self.itShowID.GetId(), self.parent.bShowID) + + if self.parent.tab is not None: # TODO otherwise + item = wx.MenuItem(self, -1, "Add") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.OnAddColumn, item) + + if len(self.ISel)==1 and self.ISel[0]>=0: + item = wx.MenuItem(self, -1, "Rename") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.OnRenameColumn, item) + if len(self.ISel) == 1 and any( + f['pos'] == self.ISel[0] for f in self.parent.tab.formulas): + item = wx.MenuItem(self, -1, "Edit") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.OnEditColumn, item) + if len(self.ISel)>=1 and self.ISel[0]>=0: + item = wx.MenuItem(self, -1, "Delete") + self.MyAppend(item) + self.Bind(wx.EVT_MENU, self.OnDeleteColumn, item) + + def MyAppend(self, item): + try: + self.Append(item) # python3 + except: + self.AppendItem(item) # python2 + + def OnShowID(self, event=None): + self.parent.bShowID=self.itShowID.IsChecked() + xSel,ySel,_,_ = self.parent.getColumnSelection() + self.parent.setGUIColumns(xSel=xSel, ySel=ySel) + + def OnRenameColumn(self, event=None): + iFilt = self.ISel[0] + if self.parent.bShowID: + oldName = self.parent.lbColumns.GetString(iFilt)[4:] + else: + oldName = self.parent.lbColumns.GetString(iFilt) + dlg = wx.TextEntryDialog(self.parent, 'New column name:', 'Rename column',oldName,wx.OK|wx.CANCEL) + dlg.CentreOnParent() + if dlg.ShowModal() == wx.ID_OK: + newName=dlg.GetValue() + main=self.parent.mainframe + ITab,STab=main.selPanel.getSelectedTables() + # TODO adapt me for Sim. tables mode + iFull = self.parent.Filt2Full[iFilt] + if iFull>0: # Important since -1 would rename last column of table + if main.tabList.haveSameColumns(ITab): + for iTab,sTab in zip(ITab,STab): + main.tabList.get(iTab).renameColumn(iFull-1,newName) + else: + self.parent.tab.renameColumn(iFull-1,newName) + self.parent.updateColumn(iFilt,newName) #faster + self.parent.selPanel.updateLayout() + # a trigger for the plot is required but skipped for now + + def OnEditColumn(self, event): + main=self.parent.mainframe + if len(self.ISel) != 1: + raise ValueError('Only one signal can be edited!') + ITab, STab = main.selPanel.getSelectedTables() + for iTab,sTab in zip(ITab,STab): + if sTab == self.parent.tab.active_name: + for f in main.tabList.get(iTab).formulas: + if f['pos'] == self.ISel[0]: + sName = f['name'] + sFormula = f['formula'] + break + else: + raise ValueError('No formula found at {0} for table {1}!'.format(self.ISel[0], sTab)) + self.showFormulaDialog('Edit column', sName, sFormula) + + def OnDeleteColumn(self, event): + main=self.parent.mainframe + iX = self.parent.comboX.GetSelection() + ITab,STab=main.selPanel.getSelectedTables() + # TODO adapt me for Sim. 
tables mode + IFull = [self.parent.Filt2Full[iFilt]-1 for iFilt in self.ISel] + IFull = [iFull for iFull in IFull if iFull>=0] + if main.tabList.haveSameColumns(ITab): + for iTab,sTab in zip(ITab,STab): + main.tabList.get(iTab).deleteColumns(IFull) + else: + self.parent.tab.deleteColumns(IFull) + self.parent.setColumns() + self.parent.setGUIColumns(xSel=iX) + main.redraw() + + def OnAddColumn(self, event): + main=self.parent.mainframe + self.showFormulaDialog('Add a new column') + + def showFormulaDialog(self, title, name='', formula=''): + bValid=False + bCancelled=False + main=self.parent.mainframe + sName=name + sFormula=formula + + if self.parent.bShowID: + columns=[no_unit(self.parent.lbColumns.GetString(i)[4:]) for i in self.ISel] + else: + columns=[no_unit(self.parent.lbColumns.GetString(i)) for i in self.ISel] + if len(self.ISel)>0: + main_unit=unit(self.parent.lbColumns.GetString(self.ISel[-1])) + else: + main_unit='' + xcol = self.parent.comboX.GetStringSelection() + xunit = unit(xcol) + xcol = no_unit(xcol) + + while (not bValid) and (not bCancelled): + dlg = FormulaDialog(title=title,columns=columns,xcol=xcol,xunit=xunit,unit=main_unit,name=sName,formula=sFormula) + dlg.CentreOnParent() + if dlg.ShowModal()==wx.ID_OK: + sName = dlg.name.GetValue() + sFormula = dlg.formula.GetValue() + dlg.Destroy() + if len(self.ISel)>0: + iFilt=self.ISel[-1] + iFull=self.parent.Filt2Full[iFilt] + else: + iFull = -1 + + ITab,STab=main.selPanel.getSelectedTables() + #if main.tabList.haveSameColumns(ITab): + sError='' + nError=0 + haveSameColumns=main.tabList.haveSameColumns(ITab) + for iTab,sTab in zip(ITab,STab): + if haveSameColumns or self.parent.tab.active_name == sTab: + # apply formula to all tables with same columns, otherwise only to active table + if title.startswith('Edit'): + bValid=main.tabList.get(iTab).setColumnByFormula(sName,sFormula,iFull) + else: + bValid=main.tabList.get(iTab).addColumnByFormula(sName,sFormula,iFull) + if not bValid: + sError+='The formula didn''t eval for table {}\n'.format(sTab) + nError+=1 + if len(sError)>0: + Error(self.parent,sError) + if nErrorlen(self.columns): + print('',self.Filt2Full) + print('',self.columns) + raise Exception('Error in Filt2Full') + return self.columns[self.Filt2Full] + + def setReadOnly(self, tabLabel='', cols=[]): + """ Set this list of columns as readonly and non selectable """ + self.tab=None + self.bReadOnly=True + self.lb.SetLabel(tabLabel) + self.setColumns(columnNames=cols) + self.setGUIColumns() + self.lbColumns.Enable(True) + self.comboX.Enable(False) + self.lbColumns.SetSelection(-1) + self.bt.Enable(False) + self.bShowID=False + self.btClear.Enable(False) + self.btFilter.Enable(False) + self.tFilter.Enable(False) + self.tFilter.SetValue('') + + def setTab(self,tab=None,xSel=-1,ySel=[],colNames=None, tabLabel=''): + """ Set the table used for the columns, update the GUI """ + self.tab=tab; + self.lbColumns.Enable(True) + self.comboX.Enable(True) + self.bReadOnly=False + self.btClear.Enable(True) + self.btFilter.Enable(True) + self.tFilter.Enable(True) + self.bt.Enable(True) + if tab is not None: + self.Filt2Full=None # TODO + if tab.active_name!='default': + self.lb.SetLabel(' '+tab.active_name) + self.setColumns() + self.setGUIColumns(xSel=xSel, ySel=ySel) + else: + self.Filt2Full=None # TODO Decide whether filter should be applied... 
+ self.lb.SetLabel(tabLabel) + self.setColumns(columnNames=colNames) + self.setGUIColumns(xSel=xSel, ySel=ySel) + + def updateColumn(self,i,newName): + """ Update of one column name + i: index in GUI + """ + iFull = self.Filt2Full[i] + if self.bShowID: + newName='{:03d} '.format(iFull)+newName + self.lbColumns.SetString(i,newName) + self.comboX.SetString (i,newName) + self.columns[iFull] = newName + + def Full2Filt(self,iFull): + try: + return self.Filt2Full.index(iFull) + except: + return -1 + + def setColumns(self, columnNames=None): + # Get columns from user inputs, or table, or stored. + if columnNames is not None: + # Populating based on user inputs.. + columns=columnNames + elif self.tab is None: + columns=self.columns + else: + # Populating based on table (safest if table was updated) + columns=['Index']+self.tab.columns + # Storing columns, considered as "Full" + self.columns=np.array(columns) + + def setGUIColumns(self, xSel=-1, ySel=[]): + """ Set GUI columns based on self.columns and potential filter """ + # Filtering columns if neeed + sFilt = self.tFilter.GetLineText(0).strip() + if len(sFilt)>0: + Lf, If = filter_list(self.columns, sFilt) + self.Filt2Full = If + else: + self.Filt2Full = list(np.arange(len(self.columns))) + columns=self.columns[self.Filt2Full] + + # GUI update + self.Freeze() + if self.bShowID: + columnsY= ['{:03d} '.format(i)+c for i,c in enumerate(columns)] + columnsX= ['{:03d} '.format(i)+c for i,c in enumerate(self.columns)] + else: + columnsY= columns + columnsX= self.columns + self.lbColumns.Set(columnsY) # potentially filterd + # Slow line for many columns + # NOTE: limiting to 300 for now.. I'm not sure anywant would want to scroll more than that + # Consider adding a "more button" + # see e.g. https://comp.soft-sys.wxwindows.narkive.com/gDfA1Ds5/long-load-time-in-wxpython + self.comboX.Set(columnsX[:300]) # non filtered + + # Set selection for y, if any, and considering filtering + for iFull in ySel: + if iFull=0: + iFilt = self.Full2Filt(iFull) + if iFilt>0: + self.lbColumns.SetSelection(iFilt) + self.lbColumns.EnsureVisible(iFilt) + if len(self.lbColumns.GetSelections())<=0: + self.lbColumns.SetSelection(self.getDefaultColumnY(self.tab,len(columnsY)-1)) + + # Set selection for x, if any, NOTE x is not filtered! 
+ if (xSel<0) or xSel>len(columnsX): + self.comboX.SetSelection(self.getDefaultColumnX(self.tab,len(columnsX)-1)) + else: + self.comboX.SetSelection(xSel) + self.Thaw() + + def forceOneSelection(self): + ISel=self.lbColumns.GetSelections() + self.lbColumns.SetSelection(-1) + if len(ISel)>0: + self.lbColumns.SetSelection(ISel[0]) + + def forceZeroSelection(self): + self.lbColumns.SetSelection(-1) + + def empty(self): + self.lbColumns.Clear() + self.comboX.Clear() + self.lb.SetLabel('') + self.bReadOnly=False + self.lbColumns.Enable(False) + self.comboX.Enable(False) + self.bt.Enable(False) + self.tab=None + self.columns=[] + self.Filt2Full=None + self.btClear.Enable(False) + self.btFilter.Enable(False) + self.tFilter.Enable(False) + self.tFilter.SetValue('') + + def getColumnSelection(self): + iX = self.comboX.GetSelection() + if self.bShowID: + sX = self.comboX.GetStringSelection()[4:] + else: + sX = self.comboX.GetStringSelection() + IY = self.lbColumns.GetSelections() + if self.bShowID: + SY = [self.lbColumns.GetString(i)[4:] for i in IY] + else: + SY = [self.lbColumns.GetString(i) for i in IY] + iXFull = iX # NOTE: x is always in full + IYFull = [self.Filt2Full[iY] for iY in IY] + return iXFull,IYFull,sX,SY + + def onClearFilter(self, event=None): + self.tFilter.SetValue('') + self.onFilterChange() + + def onFilterChange(self, event=None): + xSel,ySel,_,_ = self.getColumnSelection() # (indices in full) + self.setGUIColumns(xSel=xSel, ySel=ySel) # <<< Filtering done here + self.triggerPlot() # Trigger a col selection event + + def onFilterKey(self, event=None): + s=GetKeyString(event) + if s=='ESCAPE' or s=='Ctrl+C': + self.onClearFilter() + event.Skip() + + def triggerPlot(self): + event=wx.PyCommandEvent(wx.EVT_LISTBOX.typeId, self.lbColumns.GetId()) + wx.PostEvent(self.GetEventHandler(), event) + + +# --------------------------------------------------------------------------------} +# --- Selection Panel +# --------------------------------------------------------------------------------{ +class SelectionPanel(wx.Panel): + """ Display options for the user to select data """ + def __init__(self, parent, tabList, mode='auto',mainframe=None): + # Superclass constructor + super(SelectionPanel,self).__init__(parent) + # DATA + self.mainframe = mainframe + self.tabList = None + self.itabForCol = None + self.parent = parent + self.tabSelections = {} + self.tabSelected = [] # NOTE only used to remember a selection after a reload + self.modeRequested = mode + self.currentMode = None + self.nSplits = -1 + + # GUI DATA + self.splitter = MultiSplit(self, style=wx.SP_LIVE_UPDATE) + self.splitter.SetMinimumPaneSize(70) + self.tabPanel = TablePanel (self.splitter,mainframe, tabList) + self.colPanel1 = ColumnPanel(self.splitter, self, mainframe); + self.colPanel2 = ColumnPanel(self.splitter, self, mainframe); + self.colPanel3 = ColumnPanel(self.splitter, self, mainframe); + self.tabPanel.Hide() + self.colPanel1.Hide() + self.colPanel2.Hide() + self.colPanel3.Hide() + + # Layout + self.updateLayout() + VertSizer = wx.BoxSizer(wx.VERTICAL) + VertSizer.Add(self.splitter, 2, flag=wx.EXPAND, border=0) + self.SetSizer(VertSizer) + + # TRIGGERS + self.setTables(tabList) + + def updateLayout(self,mode=None): + self.Freeze() + if mode is None: + mode=self.modeRequested + else: + self.modeRequested = mode + if mode=='auto': + self.autoMode() + elif mode=='sameColumnsMode': + self.sameColumnsMode() + elif mode=='simColumnsMode': + self.simColumnsMode() + elif mode=='twoColumnsMode': + self.twoColumnsMode() + 
elif mode=='threeColumnsMode': + self.threeColumnsMode() + else: + self.Thaw() + raise Exception('Wrong mode for selection layout: {}'.format(mode)) + self.Thaw() + + + def autoMode(self): + ISel=self.tabPanel.lbTab.GetSelections() + if self.tabList is not None: + if self.tabList.len()<=0: + self.nSplits=-1 + self.splitter.removeAll() + elif self.tabList.haveSameColumns(): + self.sameColumnsMode() + elif self.tabList.haveSameColumns(ISel): + # We don't do same column because we know at least one table is different + # to avoid "jumping" too much + self.twoColumnsMode() + else: + # See if tables are quite similar + IKeepPerTab, IMissPerTab, IDuplPerTab, nCols = getTabCommonColIndices([self.tabList.get(i) for i in ISel]) + if np.all(np.array([len(I) for I in IMissPerTab]))=2): + self.simColumnsMode() + elif len(ISel)==2: + self.twoColumnsMode() + elif len(ISel)==3: + self.threeColumnsMode() + else: + #self.simColumnsMode(self) + raise Exception('Too many panels selected with significant columns differences.') + + def sameColumnsMode(self): + self.currentMode = 'sameColumnsMode' + if self.nSplits==1: + return + if self.nSplits==0 and self.tabList.len()<=1: + return + self.splitter.removeAll() + if self.tabList is not None: + if self.tabList.len()>1: + self.splitter.AppendWindow(self.tabPanel) + self.splitter.AppendWindow(self.colPanel1) + if self.mainframe is not None: + self.mainframe.mainFrameUpdateLayout() + if self.tabList is not None: + if self.tabList.len()<=1: + self.nSplits=0 + else: + self.nSplits=1 + else: + self.nSplits=0 + + def simColumnsMode(self): + self.currentMode = 'simColumnsMode' + self.splitter.removeAll() + self.splitter.AppendWindow(self.tabPanel) + self.splitter.AppendWindow(self.colPanel2) + self.splitter.AppendWindow(self.colPanel1) + self.splitter.setEquiSash() + if self.nSplits<2 and self.mainframe is not None: + self.mainframe.mainFrameUpdateLayout() + self.nSplits=2 + + def twoColumnsMode(self): + self.currentMode = 'twoColumnsMode' + if self.nSplits==2: + return + self.splitter.removeAll() + self.splitter.AppendWindow(self.tabPanel) + self.splitter.AppendWindow(self.colPanel2) + self.splitter.AppendWindow(self.colPanel1) + self.splitter.setEquiSash() + if self.nSplits<2 and self.mainframe is not None: + self.mainframe.mainFrameUpdateLayout() + self.nSplits=2 + + def threeColumnsMode(self): + self.currentMode = 'threeColumnsMode' + if self.nSplits==3: + return + self.splitter.removeAll() + self.splitter.AppendWindow(self.tabPanel) + self.splitter.AppendWindow(self.colPanel3) + self.splitter.AppendWindow(self.colPanel2) + self.splitter.AppendWindow(self.colPanel1) + self.splitter.setEquiSash() + if self.mainframe is not None: + self.mainframe.mainFrameUpdateLayout() + self.nSplits=3 + + def setTables(self,tabList,update=False): + """ Set the list of tables. 
Keeping the selection if it's an update """ + # TODO PUT ME IN TABLE PANEL + # Find a better way to remember selection + #print('UPDATING TABLES') + # Emptying GUI - TODO only if needed + self.colPanel1.empty() + self.colPanel2.empty() + self.colPanel3.empty() + # Adding + self.tabList = tabList + self.tabPanel.tabList = self.tabList + tabnames = self.tabList.tabNames + self.tabPanel.updateTabNames() + for tn in tabnames: + if tn not in self.tabSelections.keys(): + self.tabSelections[tn]={'xSel':-1,'ySel':[]} + else: + pass # do nothing + + # Reselecting + if len(self.tabSelected)>0: + # Removed line below since two column mode implemented + #if not haveSameColumns(tabs,ISel): + # ISel=[ISel[0]] + for i in self.tabSelected: + if i0: + # Trigger - updating columns and layout + ISel=self.tabPanel.lbTab.GetSelections() + self.tabSelected=ISel + if self.currentMode=='simColumnsMode': + self.setColForSimTab(ISel) + else: + if len(ISel)==1: + self.setTabForCol(ISel[0],1) + elif len(ISel)==2: + self.setTabForCol(ISel[0],1) + self.setTabForCol(ISel[1],2) + elif len(ISel)==3: + self.setTabForCol(ISel[0],1) + self.setTabForCol(ISel[1],2) + self.setTabForCol(ISel[2],3) + else: # Likely all tables have the same columns + self.setTabForCol(ISel[0],1) + self.updateLayout(self.modeRequested) + + def setTabForCol(self,iTabSel,iPanel): + t = self.tabList.get(iTabSel) + ts = self.tabSelections[t.name] + if iPanel==1: + self.colPanel1.setTab(t,ts['xSel'],ts['ySel']) + elif iPanel==2: + self.colPanel2.setTab(t,ts['xSel'],ts['ySel']) + elif iPanel==3: + self.colPanel3.setTab(t,ts['xSel'],ts['ySel']) + else: + raise Exception('Wrong ipanel') + + def setColForSimTab(self,ISel): + """ Set column panels for similar tables """ + tabs = [self.tabList.get(i) for i in ISel] + IKeepPerTab, IMissPerTab, IDuplPerTab, _ = getTabCommonColIndices(tabs) + LenMiss = np.array([len(I) for I in IMissPerTab]) + LenKeep = np.array([len(I) for I in IKeepPerTab]) + LenDupl = np.array([len(I) for I in IDuplPerTab]) + + ColInfo = ['Sim. 
table mode '] + ColInfo += [''] + if self.tabList.haveSameColumns(ISel): + if len(ISel)>1: + ColInfo += ['Columns identical',''] + else: + if (np.all(np.array(LenMiss)==0)): + ColInfo += ['Columns identical'] + ColInfo += ['Order different!'] + + ColInfo += ['','First difference:'] + ColInfo.append('----------------------------------') + bFirst=True + for it,t in enumerate(tabs): + print('IKeep',IKeepPerTab[it]) + if it==0: + continue + INotOrdered=[ii for i,ii in enumerate(IKeepPerTab[it]) if ii!=IKeepPerTab[0][i]] + print('INot',INotOrdered) + if len(INotOrdered)>0: + im=INotOrdered[0] + if bFirst: + ColInfo.append('{}:'.format(tabs[0].active_name)) + ColInfo.append('{:03d} {:s}'.format(im, tabs[0].columns[im])) + bFirst=False + ColInfo.append('{}:'.format(t.active_name)) + ColInfo.append('{:03d} {:s}'.format(im, t.columns[im])) + ColInfo.append('----------------------------------') + + else: + ColInfo += ['Columns different!'] + ColInfo += ['(similar: {})'.format(LenKeep[0])] + ColInfo += ['','Missing columns:'] + ColInfo.append('----------------------------------') + for it,t in enumerate(tabs): + ColInfo.append('{}:'.format(t.active_name)) + if len(IMissPerTab[it])==0: + ColInfo.append(' (None) ') + for im in IMissPerTab[it]: + ColInfo.append('{:03d} {:s}'.format(im, t.columns[im])) + ColInfo.append('----------------------------------') + + if (np.any(np.array(LenDupl)>0)): + if len(ISel)>1: + ColInfo += ['','Common duplicates:'] + else: + ColInfo += ['','Duplicates:'] + ColInfo.append('----------------------------------') + for it,t in enumerate(tabs): + ColInfo.append('{}:'.format(t.active_name)) + if len(IDuplPerTab[it])==0: + ColInfo.append(' (None) ') + for im in IDuplPerTab[it]: + ColInfo.append('{:03d} {:s}'.format(im, t.columns[im])) + ColInfo.append('----------------------------------') + + + colNames = ['Index'] + [tabs[0].columns[i] for i in IKeepPerTab[0]] + self.colPanel1.setTab(tab=None, colNames=colNames, tabLabel=' Tab. Intersection') + self.colPanel2.setReadOnly(' Tab. Difference', ColInfo) + self.IKeepPerTab=IKeepPerTab + + + + def selectDefaultTable(self): + # Selecting the first table + if self.tabPanel.lbTab.GetCount()>0: + self.tabPanel.lbTab.SetSelection(0) + self.tabSelected=[0] + else: + self.tabSelected=[] + + def tabSelectionChanged(self): + # TODO This can be cleaned-up and merged with updateLayout + #print('Tab selection change') + # Storing the previous selection + #self.printSelection() + self.saveSelection() # + #self.printSelection() + ISel=self.tabPanel.lbTab.GetSelections() + if len(ISel)>0: + if self.modeRequested=='auto': + self.autoMode() + if self.currentMode=='simColumnsMode':# and len(ISel)>1: + self.setColForSimTab(ISel) + self.tabSelected=self.tabPanel.lbTab.GetSelections() + return + + if self.tabList.haveSameColumns(ISel): + # Setting tab + self.setTabForCol(ISel[0],1) + self.colPanel2.empty() + self.colPanel3.empty() + else: + if self.nSplits==2: + if len(ISel)>2: + Error(self,'In this mode, only two tables can be selected. To compare three tables, uses the "3 different tables" mode. Otherwise the tables need to have the same columns.') + ISel=ISel[0:2] + self.tabPanel.lbTab.SetSelection(wx.NOT_FOUND) + for isel in ISel: + self.tabPanel.lbTab.SetSelection(isel) + self.colPanel3.empty() + elif self.nSplits==3: + if len(ISel)>3: + Error(self,'In this mode, only three tables can be selected. 
To compare more than three tables, the tables need to have the same columns.') + ISel=ISel[0:3] + self.tabPanel.lbTab.SetSelection(wx.NOT_FOUND) + for isel in ISel: + self.tabPanel.lbTab.SetSelection(isel) + else: + Error(self,'The tables selected have different columns.\n\nThis is not compatible with the "Same tables" mode. To compare them, chose one of the following mode: "2 tables", "3 tables" or "Sim. tables".') + self.colPanel2.empty() + self.colPanel3.empty() + # unselect all and select only the first one + ISel=[ISel[0]] + self.tabPanel.lbTab.SetSelection(wx.NOT_FOUND) + self.tabPanel.lbTab.SetSelection(ISel[0]) + for iPanel,iTab in enumerate(ISel): + self.setTabForCol(iTab,iPanel+1) + #print('>>>Updating tabSelected, from',self.tabSelected,'to',self.tabPanel.lbTab.GetSelections()) + self.tabSelected=self.tabPanel.lbTab.GetSelections() + + def colSelectionChanged(self): + """ Simple triggers when column selection is changed, NOTE: does not redraw """ + if self.currentMode=='simColumnsMode': + self.colPanel2.forceZeroSelection() + else: + if self.nSplits in [2,3]: + ISel=self.tabPanel.lbTab.GetSelections() + if self.tabList.haveSameColumns(ISel): + pass # TODO: this test is identical to onTabSelectionChange. Unification. + # elif len(ISel)==2: + # self.colPanel1.forceOneSelection() + # self.colPanel2.forceOneSelection() + # elif len(ISel)==3: + # self.colPanel1.forceOneSelection() + # self.colPanel2.forceOneSelection() + # self.colPanel3.forceOneSelection() + + def update_tabs(self, tabList): + self.setTables(tabList, update=True) + + def renameTable(self,iTab, oldName, newName): + #self.printSelection() + self.tabSelections[newName] = self.tabSelections.pop(oldName) + self.tabPanel.updateTabNames() + #self.printSelection() + + def saveSelection(self): + #self.ISel=self.tabPanel.lbTab.GetSelections() + ISel=self.tabSelected # + if self.tabList.haveSameColumns(ISel): + for ii in ISel: + t=self.tabList.get(ii) + self.tabSelections[t.name]['xSel'] = self.colPanel1.comboX.GetSelection() + self.tabSelections[t.name]['ySel'] = self.colPanel1.lbColumns.GetSelections() + else: + if len(ISel)>=1: + t=self.tabList.get(ISel[0]) + self.tabSelections[t.name]['xSel'] = self.colPanel1.comboX.GetSelection() + self.tabSelections[t.name]['ySel'] = self.colPanel1.lbColumns.GetSelections() + if len(ISel)>=2: + t=self.tabList.get(ISel[1]) + self.tabSelections[t.name]['xSel'] = self.colPanel2.comboX.GetSelection() + self.tabSelections[t.name]['ySel'] = self.colPanel2.lbColumns.GetSelections() + if len(ISel)>=3: + t=self.tabList.get(ISel[2]) + self.tabSelections[t.name]['xSel'] = self.colPanel3.comboX.GetSelection() + self.tabSelections[t.name]['ySel'] = self.colPanel3.lbColumns.GetSelections() + self.tabSelected = self.tabPanel.lbTab.GetSelections(); + + def printSelection(self): + print('Number of tabSelections stored:',len(self.tabSelections)) + TS=self.tabSelections + for i,tn in enumerate(self.tabList.tabNames): + if tn not in TS.keys(): + print('Tab',i,'>>> Name {} not found in selection'.format(tn)) + else: + print('Tab',i,'xSel:',TS[tn]['xSel'],'ySel:',TS[tn]['ySel'],'Name:',tn) + + def getPlotDataSelection(self): + ID = [] + SameCol=False + if self.tabList is not None and self.tabList.len()>0: + ITab,STab = self.getSelectedTables() + if self.currentMode=='simColumnsMode' and len(ITab)>1: + iiX1,IY1,ssX1,SY1 = self.colPanel1.getColumnSelection() + SameCol=False + for i,(itab,stab) in enumerate(zip(ITab,STab)): + IKeep=self.IKeepPerTab[i] + for j,(iiy,ssy) in enumerate(zip(IY1,SY1)): + if iiy==0: + 
iy = 0 + sy = ssy + else: + iy = IKeep[iiy-1]+1 + sy = self.tabList.get(itab).columns[IKeep[iiy-1]] + if iiX1==0: + iX1 = 0 + sX1 = ssX1 + else: + iX1 = IKeep[iiX1-1]+1 + sX1 = self.tabList.get(itab).columns[IKeep[iiX1-1]] + ID.append([itab,iX1,iy,sX1,sy,stab]) + else: + iX1,IY1,sX1,SY1 = self.colPanel1.getColumnSelection() + SameCol=self.tabList.haveSameColumns(ITab) + if self.nSplits in [0,1] or SameCol: + for i,(itab,stab) in enumerate(zip(ITab,STab)): + for j,(iy,sy) in enumerate(zip(IY1,SY1)): + ID.append([itab,iX1,iy,sX1,sy,stab]) + elif self.nSplits in [2,3]: + if len(ITab)>=1: + for j,(iy,sy) in enumerate(zip(IY1,SY1)): + ID.append([ITab[0],iX1,iy,sX1,sy,STab[0]]) + if len(ITab)>=2: + iX2,IY2,sX2,SY2 = self.colPanel2.getColumnSelection() + for j,(iy,sy) in enumerate(zip(IY2,SY2)): + ID.append([ITab[1],iX2,iy,sX2,sy,STab[1]]) + if len(ITab)>=3: + iX2,IY2,sX2,SY2 = self.colPanel3.getColumnSelection() + for j,(iy,sy) in enumerate(zip(IY2,SY2)): + ID.append([ITab[2],iX2,iy,sX2,sy,STab[2]]) + else: + raise Exception('Wrong number of splits {}'.format(self.nSplits)) + return ID,SameCol,self.currentMode + + def getSelectedTables(self): + I=self.tabPanel.lbTab.GetSelections() + S=[self.tabPanel.lbTab.GetString(i) for i in I] + return I,S + + def getAllTables(self): + I=range(self.tabPanel.lbTab.GetCount()) + S=[self.tabPanel.lbTab.GetString(i) for i in I] + return I,S + + def clean_memory(self): + self.colPanel1.empty() + self.colPanel2.empty() + self.colPanel3.empty() + self.tabPanel.empty() + del self.tabList + self.tabList=None + + @property + def xCol(self): + iX, _, sX, _ = self.colPanel1.getColumnSelection() + return iX,sX + + +if __name__ == '__main__': + import pandas as pd; + from Tables import Table + import numpy as np + + def OnTabPopup(event): + self.PopupMenu(TablePopup(self,selPanel.tabPanel.lbTab), event.GetPosition()) + + app = wx.App(False) + self=wx.Frame(None,-1,"Title") + tab=Table(data=pd.DataFrame(data={'ColA': np.random.normal(0,1,100)+1,'ColB':np.random.normal(0,1,100)+2})) + selPanel=SelectionPanel(self,[tab],mode='twoColumnsMode') + self.SetSize((800, 600)) + self.Center() + self.Show() + selPanel.tabPanel.lbTab.Bind(wx.EVT_RIGHT_DOWN, OnTabPopup) + + + app.MainLoop() + diff --git a/pydatview/GUITools.py b/pydatview/GUITools.py index d96e864..5c96168 100644 --- a/pydatview/GUITools.py +++ b/pydatview/GUITools.py @@ -1,1075 +1,1078 @@ -from __future__ import absolute_import -import wx -import numpy as np -import pandas as pd -import copy -import platform -from collections import OrderedDict - -# For log dec tool -from .common import CHAR, Error, pretty_num_short, Info -from .plotdata import PlotData -from pydatview.tools.damping import logDecFromDecay -from pydatview.tools.curve_fitting import model_fit, extract_key_miscnum, extract_key_num, MODELS, FITTERS, set_common_keys - - -TOOL_BORDER=15 - -# --------------------------------------------------------------------------------} -# --- Default class for tools -# --------------------------------------------------------------------------------{ -class GUIToolPanel(wx.Panel): - def __init__(self, parent): - super(GUIToolPanel,self).__init__(parent) - self.parent = parent - - def destroy(self,event=None): - self.parent.removeTools() - - def getBtBitmap(self,par,label,Type=None,callback=None,bitmap=False): - if Type is not None: - label=CHAR[Type]+' '+label - - bt=wx.Button(par,wx.ID_ANY, label, style=wx.BU_EXACTFIT) - #try: - # if bitmap is not None: - # bt.SetBitmapLabel(wx.ArtProvider.GetBitmap(bitmap)) 
#,size=(12,12))) - # else: - #except: - # pass - if callback is not None: - par.Bind(wx.EVT_BUTTON, callback, bt) - return bt - - def getToggleBtBitmap(self,par,label,Type=None,callback=None,bitmap=False): - if Type is not None: - label=CHAR[Type]+' '+label - bt=wx.ToggleButton(par,wx.ID_ANY, label, style=wx.BU_EXACTFIT) - if callback is not None: - par.Bind(wx.EVT_TOGGLEBUTTON, callback, bt) - return bt - - - -# --------------------------------------------------------------------------------} -# --- Log Dec -# --------------------------------------------------------------------------------{ -class LogDecToolPanel(GUIToolPanel): - def __init__(self, parent): - super(LogDecToolPanel,self).__init__(parent) - btClose = self.getBtBitmap(self,'Close' ,'close' ,self.destroy ) - btComp = self.getBtBitmap(self,'Compute','compute',self.onCompute) - self.lb = wx.StaticText( self, -1, ' ') - self.sizer = wx.BoxSizer(wx.HORIZONTAL) - self.sizer.Add(btClose ,0, flag = wx.LEFT|wx.CENTER,border = 1) - self.sizer.Add(btComp ,0, flag = wx.LEFT|wx.CENTER,border = 5) - self.sizer.Add(self.lb ,0, flag = wx.LEFT|wx.CENTER,border = 5) - self.SetSizer(self.sizer) - - def onCompute(self,event=None): - if len(self.parent.plotData)!=1: - Error(self,'Log Dec tool only works with a single plot.') - return - pd =self.parent.plotData[0] - try: - logdec,DampingRatio,T,fn,fd,IPos,INeg,epos,eneg=logDecFromDecay(pd.y,pd.x) - lab='LogDec.: {:.4f} - Damping ratio: {:.4f} - F_n: {:.4f} - F_d: {:.4f} - T:{:.3f}'.format(logdec,DampingRatio,fn,fd,T) - self.lb.SetLabel(lab) - self.sizer.Layout() - ax=self.parent.fig.axes[0] - ax.plot(pd.x[IPos],pd.y[IPos],'o') - ax.plot(pd.x[INeg],pd.y[INeg],'o') - ax.plot(pd.x ,epos,'k--') - ax.plot(pd.x ,eneg,'k--') - self.parent.canvas.draw() - except: - self.lb.SetLabel('Failed. 
The signal needs to look like the decay of a first order system.') - #self.parent.load_and_draw(); # DATA HAS CHANGED - -# --------------------------------------------------------------------------------} -# --- Outliers -# --------------------------------------------------------------------------------{ -class OutlierToolPanel(GUIToolPanel): - """ - A quick and dirty solution to manipulate plotData - I need to think of a better way to do that - """ - def __init__(self, parent): - super(OutlierToolPanel,self).__init__(parent) - self.parent = parent # parent is GUIPlotPanel - - # Setting default states to parent - if 'RemoveOutliers' not in self.parent.plotDataOptions.keys(): - self.parent.plotDataOptions['RemoveOutliers']=False - if 'OutliersMedianDeviation' not in self.parent.plotDataOptions.keys(): - self.parent.plotDataOptions['OutliersMedianDeviation']=5 - - btClose = self.getBtBitmap(self,'Close','close',self.destroy) - self.btComp = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleCompute) - - lb1 = wx.StaticText(self, -1, 'Median deviation:') -# self.tMD = wx.TextCtrl(self, wx.ID_ANY,, size = (30,-1), style=wx.TE_PROCESS_ENTER) - self.tMD = wx.SpinCtrlDouble(self, value='11', size=wx.Size(60,-1)) - self.tMD.SetValue(self.parent.plotDataOptions['OutliersMedianDeviation']) - self.tMD.SetRange(0.0, 1000) - self.tMD.SetIncrement(0.5) - - self.lb = wx.StaticText( self, -1, '') - self.sizer = wx.BoxSizer(wx.HORIZONTAL) - self.sizer.Add(btClose ,0,flag = wx.LEFT|wx.CENTER,border = 1) - self.sizer.Add(self.btComp,0,flag = wx.LEFT|wx.CENTER,border = 5) - self.sizer.Add(lb1 ,0,flag = wx.LEFT|wx.CENTER,border = 5) - self.sizer.Add(self.tMD ,0,flag = wx.LEFT|wx.CENTER,border = 5) - self.sizer.Add(self.lb ,0,flag = wx.LEFT|wx.CENTER,border = 5) - self.SetSizer(self.sizer) - - self.Bind(wx.EVT_SPINCTRLDOUBLE, self.onMDChangeArrow, self.tMD) - self.Bind(wx.EVT_TEXT_ENTER, self.onMDChangeEnter, self.tMD) - - if platform.system()=='Windows': - # See issue https://github.com/wxWidgets/Phoenix/issues/1762 - self.spintxt = self.tMD.Children[0] - assert isinstance(self.spintxt, wx.TextCtrl) - self.spintxt.Bind(wx.EVT_CHAR_HOOK, self.onMDChangeChar) - - self.onToggleCompute(init=True) - - def destroy(self,event=None): - self.parent.plotDataOptions['RemoveOutliers']=False - super(OutlierToolPanel,self).destroy() - - def onToggleCompute(self,event=None, init=False): - self.parent.plotDataOptions['OutliersMedianDeviation'] = float(self.tMD.Value) - - if not init: - self.parent.plotDataOptions['RemoveOutliers']= not self.parent.plotDataOptions['RemoveOutliers'] - - if self.parent.plotDataOptions['RemoveOutliers']: - self.lb.SetLabel('Outliers are now removed on the fly. 
Click "Clear" to stop.') - self.btComp.SetLabel(CHAR['sun']+' Clear') - else: - self.lb.SetLabel('Click on "Apply" to remove outliers on the fly for all new plot.') - self.btComp.SetLabel(CHAR['cloud']+' Apply') - - if not init: - self.parent.load_and_draw() # Data will change - - def onMDChange(self, event=None): - #print(self.tMD.Value) - self.parent.plotDataOptions['OutliersMedianDeviation'] = float(self.tMD.Value) - if self.parent.plotDataOptions['RemoveOutliers']: - self.parent.load_and_draw() # Data will change - - def onMDChangeArrow(self, event): - self.onMDChange() - event.Skip() - - def onMDChangeEnter(self, event): - self.onMDChange() - event.Skip() - - def onMDChangeChar(self, event): - event.Skip() - code = event.GetKeyCode() - if code in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]: - #print(self.spintxt.Value) - self.tMD.SetValue(self.spintxt.Value) - self.onMDChangeEnter(event) - - - -# --------------------------------------------------------------------------------} -# --- Moving Average -# --------------------------------------------------------------------------------{ -class FilterToolPanel(GUIToolPanel): - """ - Moving average/Filters - A quick and dirty solution to manipulate plotData - I need to think of a better way to do that - """ - def __init__(self, parent): - from pydatview.tools.signal import FILTERS - super(FilterToolPanel,self).__init__(parent) - self.parent = parent # parent is GUIPlotPanel - - self._DEFAULT_FILTERS=FILTERS - - # Setting default states to parent - if 'Filter' not in self.parent.plotDataOptions.keys(): - self.parent.plotDataOptions['Filter']=None - self._filterApplied = type(self.parent.plotDataOptions['Filter'])==dict - - - btClose = self.getBtBitmap(self,'Close','close',self.destroy) - self.btClear = self.getBtBitmap(self, 'Clear Plot','sun' , self.onClear) - self.btComp = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleCompute) - self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) - - lb1 = wx.StaticText(self, -1, 'Filter:') - self.cbFilters = wx.ComboBox(self, choices=[filt['name'] for filt in self._DEFAULT_FILTERS], style=wx.CB_READONLY) - self.lbParamName = wx.StaticText(self, -1, ' :') - self.cbFilters.SetSelection(0) - #self.tParam = wx.TextCtrl(self, wx.ID_ANY,, size = (30,-1), style=wx.TE_PROCESS_ENTER) - self.tParam = wx.SpinCtrlDouble(self, value='11', size=wx.Size(60,-1)) - self.lbInfo = wx.StaticText( self, -1, '') - - - # --- Layout - btSizer = wx.FlexGridSizer(rows=3, cols=2, hgap=2, vgap=0) - btSizer.Add(btClose ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btClear ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btComp,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btPlot ,0,flag = wx.ALL|wx.EXPAND, border = 1) - #btSizer.Add(btHelp ,0,flag = wx.ALL|wx.EXPAND, border = 1) - - horzSizer = wx.BoxSizer(wx.HORIZONTAL) - horzSizer.Add(lb1 ,0,flag = wx.LEFT|wx.CENTER,border = 5) - horzSizer.Add(self.cbFilters ,0,flag = wx.LEFT|wx.CENTER,border = 1) - horzSizer.Add(self.lbParamName ,0,flag = wx.LEFT|wx.CENTER,border = 5) - horzSizer.Add(self.tParam ,0,flag = wx.LEFT|wx.CENTER,border = 1) - - vertSizer = wx.BoxSizer(wx.VERTICAL) - vertSizer.Add(self.lbInfo ,0, flag = wx.LEFT ,border = 5) - vertSizer.Add(horzSizer ,1, flag = wx.LEFT|wx.EXPAND,border = 1) - - self.sizer = wx.BoxSizer(wx.HORIZONTAL) - self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 1) - self.sizer.Add(vertSizer ,1, flag = wx.EXPAND|wx.LEFT ,border = 1) - self.SetSizer(self.sizer) - - # --- Events - 
self.cbFilters.Bind(wx.EVT_COMBOBOX, self.onSelectFilt) - self.Bind(wx.EVT_SPINCTRLDOUBLE, self.onParamChangeArrow, self.tParam) - self.Bind(wx.EVT_TEXT_ENTER, self.onParamChangeEnter, self.tParam) - if platform.system()=='Windows': - # See issue https://github.com/wxWidgets/Phoenix/issues/1762 - self.spintxt = self.tParam.Children[0] - assert isinstance(self.spintxt, wx.TextCtrl) - self.spintxt.Bind(wx.EVT_CHAR_HOOK, self.onParamChangeChar) - - self.onSelectFilt() - self.onToggleCompute(init=True) - - def destroy(self,event=None): - self.parent.plotDataOptions['Filter']=None - super(FilterToolPanel,self).destroy() - - - def onSelectFilt(self, event=None): - """ Select the filter, but does not applied it to the plotData - parentFilt is unchanged - But if the parent already has - """ - iFilt = self.cbFilters.GetSelection() - filt = self._DEFAULT_FILTERS[iFilt] - self.lbParamName.SetLabel(filt['paramName']+':') - self.tParam.SetRange(filt['paramRange'][0], filt['paramRange'][1]) - self.tParam.SetIncrement(filt['increment']) - - parentFilt=self.parent.plotDataOptions['Filter'] - # Value - if type(parentFilt)==dict and parentFilt['name']==filt['name']: - self.tParam.SetValue(parentFilt['param']) - else: - self.tParam.SetValue(filt['param']) - - def onToggleCompute(self, event=None, init=False): - """ - apply Filter based on GUI Data - """ - parentFilt=self.parent.plotDataOptions['Filter'] - if not init: - self._filterApplied = not self._filterApplied - - if self._filterApplied: - self.parent.plotDataOptions['Filter'] =self._GUI2Filt() - #print('Apply', self.parent.plotDataOptions['Filter']) - self.lbInfo.SetLabel( - 'Filter is now applied on the fly. Change parameter live. Click "Clear" to stop. ' - ) - self.btPlot.Enable(False) - self.btClear.Enable(False) - self.btComp.SetLabel(CHAR['sun']+' Clear') - else: - self.parent.plotDataOptions['Filter'] = None - self.lbInfo.SetLabel( - 'Click on "Apply" to set filter on the fly for all plots. '+ - 'Click on "Plot" to try a filter on the current plot.' - ) - self.btPlot.Enable(True) - self.btClear.Enable(True) - self.btComp.SetLabel(CHAR['cloud']+' Apply') - - if not init: - self.parent.load_and_draw() # Data will change - - pass - - def _GUI2Filt(self): - iFilt = self.cbFilters.GetSelection() - filt = self._DEFAULT_FILTERS[iFilt].copy() - filt['param']=np.float(self.spintxt.Value) - return filt - - def onPlot(self, event=None): - """ - Overlay on current axis the filter - """ - from pydatview.tools.signal import applyFilter - if len(self.parent.plotData)!=1: - Error(self,'Plotting only works for a single plot. 
Plot less data.') - return - filt=self._GUI2Filt() - - PD = self.parent.plotData[0] - y_filt = applyFilter(PD.x0, PD.y0, filt) - ax = self.parent.fig.axes[0] - - PD_new = PlotData() - PD_new.fromXY(PD.x0, y_filt) - self.parent.transformPlotData(PD_new) - ax.plot(PD_new.x, PD_new.y, '-') - self.parent.canvas.draw() - - def onClear(self, event): - self.parent.load_and_draw() # Data will change - - def onParamChange(self, event=None): - if self._filterApplied: - self.parent.plotDataOptions['Filter'] =self._GUI2Filt() - #print('OnParamChange', self.parent.plotDataOptions['Filter']) - self.parent.load_and_draw() # Data will change - - def onParamChangeArrow(self, event): - self.onParamChange() - event.Skip() - - def onParamChangeEnter(self, event): - self.onParamChange() - event.Skip() - - def onParamChangeChar(self, event): - event.Skip() - code = event.GetKeyCode() - if code in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]: - #print(self.spintxt.Value) - self.tParam.SetValue(self.spintxt.Value) - self.onParamChangeEnter(event) - - -# --------------------------------------------------------------------------------} -# --- Resample -# --------------------------------------------------------------------------------{ -class ResampleToolPanel(GUIToolPanel): - def __init__(self, parent): - super(ResampleToolPanel,self).__init__(parent) - - # --- Data from other modules - from pydatview.tools.signal import SAMPLERS - self.parent = parent # parent is GUIPlotPanel - self._SAMPLERS=SAMPLERS - # Setting default states to parent - if 'Sampler' not in self.parent.plotDataOptions.keys(): - self.parent.plotDataOptions['Sampler']=None - self._applied = type(self.parent.plotDataOptions['Sampler'])==dict - - - # --- GUI elements - self.btClose = self.getBtBitmap(self, 'Close','close', self.destroy) - self.btAdd = self.getBtBitmap(self, 'Add','add' , self.onAdd) - self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) - self.btClear = self.getBtBitmap(self, 'Clear Plot','sun', self.onClear) - self.btApply = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleApply) - self.btHelp = self.getBtBitmap(self, 'Help','help', self.onHelp) - - #self.lb = wx.StaticText( self, -1, """ Click help """) - self.cbTabs = wx.ComboBox(self, -1, choices=[], style=wx.CB_READONLY) - self.cbMethods = wx.ComboBox(self, -1, choices=[s['name'] for s in self._SAMPLERS], style=wx.CB_READONLY) - - self.lbNewX = wx.StaticText(self, -1, 'New x: ') - self.textNewX = wx.TextCtrl(self, wx.ID_ANY, '', style = wx.TE_PROCESS_ENTER) - self.textOldX = wx.TextCtrl(self, wx.ID_ANY|wx.TE_READONLY) - self.textOldX.Enable(False) - - # --- Layout - btSizer = wx.FlexGridSizer(rows=3, cols=2, hgap=2, vgap=0) - btSizer.Add(self.btClose , 0, flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btClear , 0, flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btAdd , 0, flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btPlot , 0, flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btHelp , 0, flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btApply , 0, flag = wx.ALL|wx.EXPAND, border = 1) - - msizer = wx.FlexGridSizer(rows=2, cols=4, hgap=2, vgap=0) - msizer.Add(wx.StaticText(self, -1, 'Table:') , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) - msizer.Add(self.cbTabs , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) - msizer.Add(wx.StaticText(self, -1, 'Current x: '), 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) - msizer.Add(self.textOldX , 1, 
wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND, 1) - msizer.Add(wx.StaticText(self, -1, 'Method:') , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) - msizer.Add(self.cbMethods , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) - msizer.Add(self.lbNewX , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) - msizer.Add(self.textNewX , 1, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND, 1) - msizer.AddGrowableCol(3,1) - - self.sizer = wx.BoxSizer(wx.HORIZONTAL) - self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 5) - self.sizer.Add(msizer ,1, flag = wx.LEFT|wx.EXPAND ,border = TOOL_BORDER) - self.SetSizer(self.sizer) - - # --- Events - self.cbTabs.Bind (wx.EVT_COMBOBOX, self.onTabChange) - self.cbMethods.Bind(wx.EVT_COMBOBOX, self.onMethodChange) - self.textNewX.Bind(wx.EVT_TEXT_ENTER,self.onParamChange) - - # --- Init triggers - self.cbMethods.SetSelection(3) - self.onMethodChange(init=True) - self.onToggleApply(init=True) - self.updateTabList() - self.textNewX.SetValue('2') - - def setCurrentX(self, x=None): - if x is None: - x= self.parent.plotData[0].x - if len(x)<50: - s=np.array2string(x, separator=', ') - else: - s =np.array2string(x[[0,1,2,3]], separator=', ') - s+=', ..., ' - s+=np.array2string(x[[-3,-2,-1]], separator=', ') - s=s.replace('[','').replace(']','').replace(' ','').replace(',',', ') - - self.textOldX.SetValue(s) - - def onMethodChange(self, event=None, init=True): - """ Select the method, but does not applied it to the plotData - User data and option is unchanged - But if the user already has some options, they are used - """ - iOpt = self.cbMethods.GetSelection() - opt = self._SAMPLERS[iOpt] - self.lbNewX.SetLabel(opt['paramName']+':') - - parentOpt=self.parent.plotDataOptions['Sampler'] - # Value - if len(self.textNewX.Value)==0: - if type(parentOpt)==dict: - self.textNewX.SetValue(str(parentOpt['param'])[1:-1]) - else: - self.textNewX.SetValue(str(opt['param'])[2:-2]) - self.onParamChange() - - def onParamChange(self, event=None): - if self._applied: - self.parent.plotDataOptions['Sampler'] =self._GUI2Data() - self.parent.load_and_draw() # Data will change - self.setCurrentX() - - def _GUI2Data(self): - iOpt = self.cbMethods.GetSelection() - opt = self._SAMPLERS[iOpt].copy() - s= self.textNewX.Value.strip().replace('[','').replace(']','') - if len(s)>0: - if s.find(','): - opt['param']=np.array(s.split(',')).astype(float) - else: - opt['param']=np.array(s.split('')).astype(float) - return opt - - def onToggleApply(self, event=None, init=False): - """ - apply sampler based on GUI Data - """ - parentFilt=self.parent.plotDataOptions['Sampler'] - if not init: - self._applied = not self._applied - - if self._applied: - self.parent.plotDataOptions['Sampler'] =self._GUI2Data() - #print('Apply', self.parent.plotDataOptions['Sampler']) - #self.lbInfo.SetLabel( - # 'Sampler is now applied on the fly. Change parameter live. Click "Clear" to stop. ' - # ) - self.btPlot.Enable(False) - self.btClear.Enable(False) - self.btApply.SetLabel(CHAR['sun']+' Clear') - else: - self.parent.plotDataOptions['Sampler'] = None - #self.lbInfo.SetLabel( - # 'Click on "Apply" to set filter on the fly for all plots. '+ - # 'Click on "Plot" to try a filter on the current plot.' 
- # ) - self.btPlot.Enable(True) - self.btClear.Enable(True) - self.btApply.SetLabel(CHAR['cloud']+' Apply') - - if not init: - self.parent.load_and_draw() # Data will change - self.setCurrentX() - - - def onAdd(self,event=None): - iSel = self.cbTabs.GetSelection() - tabList = self.parent.selPanel.tabList - mainframe = self.parent.mainframe - icol, colname = self.parent.selPanel.xCol - print(icol,colname) - opt = self._GUI2Data() - errors=[] - if iSel==0: - dfs, names, errors = tabList.applyResampling(icol, opt, bAdd=True) - mainframe.load_dfs(dfs,names,bAdd=True) - else: - df, name = tabList.get(iSel-1).applyResampling(icol, opt, bAdd=True) - mainframe.load_df(df,name,bAdd=True) - self.updateTabList() - - if len(errors)>0: - raise Exception('Error: The resampling failed on some tables:\n\n'+'\n'.join(errors)) - - def onPlot(self,event=None): - from pydatview.tools.signal import applySampler - if len(self.parent.plotData)!=1: - Error(self,'Plotting only works for a single plot. Plot less data.') - return - opts=self._GUI2Data() - PD = self.parent.plotData[0] - x_new, y_new = applySampler(PD.x0, PD.y0, opts) - ax = self.parent.fig.axes[0] - - PD_new = PlotData() - PD_new.fromXY(x_new, y_new) - self.parent.transformPlotData(PD_new) - ax.plot(PD_new.x, PD_new.y, '-') - self.setCurrentX(x_new) - - self.parent.canvas.draw() - - def onClear(self,event=None): - self.parent.load_and_draw() # Data will change - # Update Current X - self.setCurrentX() - # Update Table list - self.updateTabList() - - - def onTabChange(self,event=None): - #tabList = self.parent.selPanel.tabList - #iSel=self.cbTabs.GetSelection() - pass - - def updateTabList(self,event=None): - tabList = self.parent.selPanel.tabList - tabListNames = ['All opened tables']+tabList.getDisplayTabNames() - try: - iSel=np.max([np.min([self.cbTabs.GetSelection(),len(tabListNames)]),0]) - self.cbTabs.Clear() - [self.cbTabs.Append(tn) for tn in tabListNames] - self.cbTabs.SetSelection(iSel) - except RuntimeError: - pass - - def onHelp(self,event=None): - Info(self,"""Resampling. - -The resampling operation changes the "x" values of a table/plot and -adapt the "y" values accordingly. - -To resample perform the following step: - -- Chose a resampling method: - - replace: specify all the new x-values - - insert : insert a list of x values to the existing ones - - delete : delete a list of x values from the existing ones - - every-n : use every n values - - time-based: downsample using sample averaging or upsample using - linear interpolation, x-axis must already be in seconds - - delta x : specify a delta for uniform spacing of x values - -- Specify the x values as a space or comma separated list - -- Click on one of the following buttons: - - Plot: will display the resampled data on the figure - - Apply: will perform the resampling on the fly for all new plots - - Add: will create new table(s) with resampled values for all - signals. This process might take some time. 
- Select a table or choose all (default) -""") - - - -# --------------------------------------------------------------------------------} -# --- Mask -# --------------------------------------------------------------------------------{ -class MaskToolPanel(GUIToolPanel): - def __init__(self, parent): - super(MaskToolPanel,self).__init__(parent) - - tabList = self.parent.selPanel.tabList - tabListNames = ['All opened tables']+tabList.getDisplayTabNames() - - allMask = tabList.commonMaskString - if len(allMask)==0: - allMask=self.guessMask(tabList) # no known mask, we guess one to help the user - self.applied=False - else: - self.applied=True - - btClose = self.getBtBitmap(self, 'Close','close', self.destroy) - btComp = self.getBtBitmap(self, u'Mask (add)','add' , self.onApply) - if self.applied: - self.btCompMask = self.getToggleBtBitmap(self, 'Clear','sun', self.onToggleApplyMask) - self.btCompMask.SetValue(True) - else: - self.btCompMask = self.getToggleBtBitmap(self, 'Mask','cloud', self.onToggleApplyMask) - - self.lb = wx.StaticText( self, -1, """(Example of mask: "({Time}>100) && ({Time}<50) && ({WS}==5)" or "{Date} > '2018-10-01'")""") - self.cbTabs = wx.ComboBox(self, choices=tabListNames, style=wx.CB_READONLY) - self.cbTabs.SetSelection(0) - - self.textMask = wx.TextCtrl(self, wx.ID_ANY, allMask) - #self.textMask.SetValue('({Time}>100) & ({Time}<400)') - #self.textMask.SetValue("{Date} > '2018-10-01'") - - btSizer = wx.FlexGridSizer(rows=2, cols=2, hgap=2, vgap=0) - btSizer.Add(btClose ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(wx.StaticText(self, -1, '') ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(btComp ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btCompMask ,0,flag = wx.ALL|wx.EXPAND, border = 1) - - row_sizer = wx.BoxSizer(wx.HORIZONTAL) - row_sizer.Add(wx.StaticText(self, -1, 'Tab:') , 0, wx.CENTER|wx.LEFT, 0) - row_sizer.Add(self.cbTabs , 0, wx.CENTER|wx.LEFT, 2) - row_sizer.Add(wx.StaticText(self, -1, 'Mask:'), 0, wx.CENTER|wx.LEFT, 5) - row_sizer.Add(self.textMask, 1, wx.CENTER|wx.LEFT|wx.EXPAND, 5) - - vert_sizer = wx.BoxSizer(wx.VERTICAL) - vert_sizer.Add(self.lb ,0, flag = wx.ALIGN_LEFT|wx.TOP|wx.BOTTOM, border = 5) - vert_sizer.Add(row_sizer ,1, flag = wx.EXPAND|wx.ALIGN_LEFT|wx.TOP|wx.BOTTOM, border = 5) - - self.sizer = wx.BoxSizer(wx.HORIZONTAL) - self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 5) - self.sizer.Add(vert_sizer ,1, flag = wx.LEFT|wx.EXPAND ,border = TOOL_BORDER) - self.SetSizer(self.sizer) - self.Bind(wx.EVT_COMBOBOX, self.onTabChange, self.cbTabs ) - - def onTabChange(self,event=None): - tabList = self.parent.selPanel.tabList - iSel=self.cbTabs.GetSelection() - if iSel==0: - maskString = tabList.commonMaskString - else: - maskString= tabList.get(iSel-1).maskString - if len(maskString)>0: - self.textMask.SetValue(maskString) - #else: - # self.textMask.SetValue('') # no known mask - # self.textMask.SetValue(self.guessMask) # no known mask - - def guessMask(self,tabList): - cols=[c.lower() for c in tabList.get(0).columns_clean] - if 'time' in cols: - return '{Time} > 100' - elif 'date' in cols: - return "{Date} > '2017-01-01" - else: - return '' - - def onClear(self,event=None): - iSel = self.cbTabs.GetSelection() - tabList = self.parent.selPanel.tabList - mainframe = self.parent.mainframe - if iSel==0: - tabList.clearCommonMask() - else: - tabList.get(iSel-1).clearMask() - - mainframe.redraw() - self.onTabChange() - - def onToggleApplyMask(self,event=None): - self.applied = not self.applied - if self.applied: - 
self.btCompMask.SetLabel(CHAR['sun']+' Clear') - else: - self.btCompMask.SetLabel(CHAR['cloud']+' Mask') - - if self.applied: - self.onApply(event,bAdd=False) - else: - self.onClear() - - def onApply(self,event=None,bAdd=True): - maskString = self.textMask.GetLineText(0) - iSel = self.cbTabs.GetSelection() - tabList = self.parent.selPanel.tabList - mainframe = self.parent.mainframe - if iSel==0: - dfs, names, errors = tabList.applyCommonMaskString(maskString, bAdd=bAdd) - if bAdd: - mainframe.load_dfs(dfs,names,bAdd=bAdd) - else: - mainframe.redraw() - if len(errors)>0: - raise Exception('Error: The mask failed on some tables:\n\n'+'\n'.join(errors)) - else: - dfs, name = tabList.get(iSel-1).applyMaskString(maskString, bAdd=bAdd) - if bAdd: - mainframe.load_df(df,name,bAdd=bAdd) - else: - mainframe.redraw() - self.updateTabList() - - - def updateTabList(self,event=None): - tabList = self.parent.selPanel.tabList - tabListNames = ['All opened tables']+tabList.getDisplayTabNames() - try: - iSel=np.min([self.cbTabs.GetSelection(),len(tabListNames)]) - self.cbTabs.Clear() - [self.cbTabs.Append(tn) for tn in tabListNames] - self.cbTabs.SetSelection(iSel) - except RuntimeError: - pass - -# --------------------------------------------------------------------------------} -# --- Radial -# --------------------------------------------------------------------------------{ -sAVG_METHODS = ['Last `n` seconds','Last `n` periods'] -AVG_METHODS = ['constantwindow','periods'] - -class RadialToolPanel(GUIToolPanel): - def __init__(self, parent): - super(RadialToolPanel,self).__init__(parent) - - tabList = self.parent.selPanel.tabList - tabListNames = ['All opened tables']+tabList.getDisplayTabNames() - - btClose = self.getBtBitmap(self,'Close' ,'close' , self.destroy) - btComp = self.getBtBitmap(self,'Average','compute', self.onApply) # ART_PLUS - - self.lb = wx.StaticText( self, -1, """Select tables, averaging method and average parameter (`Period` methods uses the `azimuth` signal) """) - self.cbTabs = wx.ComboBox(self, choices=tabListNames, style=wx.CB_READONLY) - self.cbMethod = wx.ComboBox(self, choices=sAVG_METHODS, style=wx.CB_READONLY) - self.cbTabs.SetSelection(0) - self.cbMethod.SetSelection(0) - - self.textAverageParam = wx.TextCtrl(self, wx.ID_ANY, '2',size = (36,-1), style=wx.TE_PROCESS_ENTER) - - btSizer = wx.FlexGridSizer(rows=2, cols=1, hgap=0, vgap=0) - #btSizer = wx.BoxSizer(wx.VERTICAL) - btSizer.Add(btClose ,0, flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(btComp ,0, flag = wx.ALL|wx.EXPAND, border = 1) - - row_sizer = wx.BoxSizer(wx.HORIZONTAL) - row_sizer.Add(wx.StaticText(self, -1, 'Tab:') , 0, wx.CENTER|wx.LEFT, 0) - row_sizer.Add(self.cbTabs , 0, wx.CENTER|wx.LEFT, 2) - row_sizer.Add(wx.StaticText(self, -1, 'Method:'), 0, wx.CENTER|wx.LEFT, 5) - row_sizer.Add(self.cbMethod , 0, wx.CENTER|wx.LEFT, 2) - row_sizer.Add(wx.StaticText(self, -1, 'Param:') , 0, wx.CENTER|wx.LEFT, 5) - row_sizer.Add(self.textAverageParam , 0, wx.CENTER|wx.LEFT|wx.RIGHT| wx.EXPAND, 2) - - vert_sizer = wx.BoxSizer(wx.VERTICAL) - vert_sizer.Add(self.lb ,0, flag =wx.ALIGN_LEFT|wx.TOP|wx.BOTTOM,border = 5) - vert_sizer.Add(row_sizer ,0, flag =wx.ALIGN_LEFT|wx.TOP|wx.BOTTOM,border = 5) - - self.sizer = wx.BoxSizer(wx.HORIZONTAL) - self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 5) - self.sizer.Add(vert_sizer ,0, flag = wx.LEFT|wx.EXPAND,border = TOOL_BORDER) - self.SetSizer(self.sizer) - self.Bind(wx.EVT_COMBOBOX, self.onTabChange, self.cbTabs ) - - def onTabChange(self,event=None): - tabList = 
self.parent.selPanel.tabList - - def onApply(self,event=None): - try: - avgParam = float(self.textAverageParam.GetLineText(0)) - except: - raise Exception('Error: the averaging parameter needs to be an integer or a float') - iSel = self.cbTabs.GetSelection() - avgMethod = AVG_METHODS[self.cbMethod.GetSelection()] - tabList = self.parent.selPanel.tabList - mainframe = self.parent.mainframe - if iSel==0: - dfs, names, errors = tabList.radialAvg(avgMethod,avgParam) - mainframe.load_dfs(dfs,names,bAdd=True) - if len(errors)>0: - raise Exception('Error: The mask failed on some tables:\n\n'+'\n'.join(errors)) - else: - dfs, names = tabList.get(iSel-1).radialAvg(avgMethod,avgParam) - mainframe.load_dfs(dfs,names,bAdd=True) - - self.updateTabList() - - def updateTabList(self,event=None): - tabList = self.parent.selPanel.tabList - tabListNames = ['All opened tables']+tabList.getDisplayTabNames() - iSel=np.min([self.cbTabs.GetSelection(),len(tabListNames)]) - self.cbTabs.Clear() - [self.cbTabs.Append(tn) for tn in tabListNames] - self.cbTabs.SetSelection(iSel) - - -# --------------------------------------------------------------------------------} -# --- Curve Fitting -# --------------------------------------------------------------------------------{ -MODELS_EXAMPLE =[ - {'label':'User defined model', 'id':'eval:', - 'formula':'{a}*x**2 + {b}', - 'coeffs':None, - 'consts':None, - 'bounds':None }, - ] -MODELS_EXTRA =[ -# {'label':'Exponential decay', 'id':'eval:', -# 'formula':'{A}*exp(-{k}*x)+{B}', -# 'coeffs' :'k=1, A=1, B=0', -# 'consts' :None, -# 'bounds' :None}, -] - -class CurveFitToolPanel(GUIToolPanel): - def __init__(self, parent): - super(CurveFitToolPanel,self).__init__(parent) - - # Data - self.x = None - self.y_fit = None - - # GUI Objecst - btClose = self.getBtBitmap(self, 'Close','close', self.destroy) - btClear = self.getBtBitmap(self, 'Clear','sun', self.onClear) # DELETE - btAdd = self.getBtBitmap(self, 'Add','add' , self.onAdd) - btCompFit = self.getBtBitmap(self, 'Fit','check', self.onCurveFit) - btHelp = self.getBtBitmap(self, 'Help','help', self.onHelp) - - boldFont = self.GetFont().Bold() - lbOutputs = wx.StaticText(self, -1, 'Outputs') - lbInputs = wx.StaticText(self, -1, 'Inputs ') - lbOutputs.SetFont(boldFont) - lbInputs.SetFont(boldFont) - - self.textFormula = wx.TextCtrl(self, wx.ID_ANY, '') - self.textGuess = wx.TextCtrl(self, wx.ID_ANY, '') - self.textBounds = wx.TextCtrl(self, wx.ID_ANY, '') - self.textConstants = wx.TextCtrl(self, wx.ID_ANY, '') - - self.textFormulaNum = wx.TextCtrl(self, wx.ID_ANY, '', style=wx.TE_READONLY) - self.textCoeffs = wx.TextCtrl(self, wx.ID_ANY, '', style=wx.TE_READONLY) - self.textInfo = wx.TextCtrl(self, wx.ID_ANY, '', style=wx.TE_READONLY) - - - self.Models=copy.deepcopy(MODELS_EXAMPLE) + copy.deepcopy(FITTERS) + copy.deepcopy(MODELS) + copy.deepcopy(MODELS_EXTRA) - sModels=[d['label'] for d in self.Models] - - - self.cbModels = wx.ComboBox(self, choices=sModels, style=wx.CB_READONLY) - self.cbModels.SetSelection(0) - - btSizer = wx.FlexGridSizer(rows=3, cols=2, hgap=2, vgap=0) - btSizer.Add(btClose ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(btClear ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(btAdd ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(btCompFit ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(btHelp ,0,flag = wx.ALL|wx.EXPAND, border = 1) - - inputSizer = wx.FlexGridSizer(rows=5, cols=2, hgap=0, vgap=0) - inputSizer.Add(lbInputs ,0, 
flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - inputSizer.Add(self.cbModels ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) - inputSizer.Add(wx.StaticText(self, -1, 'Formula:') ,0, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - inputSizer.Add(self.textFormula ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) - inputSizer.Add(wx.StaticText(self, -1, 'Guess:') ,0, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - inputSizer.Add(self.textGuess ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) - inputSizer.Add(wx.StaticText(self, -1, 'Bounds:') ,0, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - inputSizer.Add(self.textBounds ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) - inputSizer.Add(wx.StaticText(self, -1, 'Constants:'),0, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - inputSizer.Add(self.textConstants ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) - inputSizer.AddGrowableCol(1,1) - - outputSizer = wx.FlexGridSizer(rows=5, cols=2, hgap=0, vgap=0) - outputSizer.Add(lbOutputs ,0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - outputSizer.Add(wx.StaticText(self, -1, '') ,0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - outputSizer.Add(wx.StaticText(self, -1, 'Formula:'),0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - outputSizer.Add(self.textFormulaNum ,1 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) - outputSizer.Add(wx.StaticText(self, -1, 'Parameters:') ,0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - outputSizer.Add(self.textCoeffs ,1 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) - outputSizer.Add(wx.StaticText(self, -1, 'Accuracy:') ,0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) - outputSizer.Add(self.textInfo ,1 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) - outputSizer.AddGrowableCol(1,0.5) - - horzSizer = wx.BoxSizer(wx.HORIZONTAL) - horzSizer.Add(inputSizer ,1.0, flag = wx.LEFT|wx.EXPAND,border = 2) - horzSizer.Add(outputSizer ,1.0, flag = wx.LEFT|wx.EXPAND,border = 9) - - vertSizer = wx.BoxSizer(wx.VERTICAL) -# vertSizer.Add(self.lbHelp ,0, flag = wx.LEFT ,border = 1) - vertSizer.Add(horzSizer ,1, flag = wx.LEFT|wx.EXPAND,border = 1) - - self.sizer = wx.BoxSizer(wx.HORIZONTAL) - self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 1) -# self.sizer.Add(vertSizerCB ,0, flag = wx.LEFT ,border = 1) - self.sizer.Add(vertSizer ,1, flag = wx.EXPAND|wx.LEFT ,border = 1) - self.SetSizer(self.sizer) - - self.Bind(wx.EVT_COMBOBOX, self.onModelChange, self.cbModels) - - self.onModelChange() - - def onModelChange(self,event=None): - iModel = self.cbModels.GetSelection() - d = self.Models[iModel] - self.textFormula.SetEditable(True) - - if d['id'].find('fitter:')==0 : - self.textGuess.Enable(False) - self.textGuess.SetValue('') - self.textFormula.Enable(False) - self.textFormula.SetValue(d['formula']) - self.textBounds.Enable(False) - self.textBounds.SetValue('') - self.textConstants.Enable(True) - # NOTE: conversion to string works with list, and tuples, not numpy array - val = ', '.join([k+'='+str(v) for 
k,v in d['consts'].items()]) - self.textConstants.SetValue(val) - else: - # Formula - if d['id'].find('eval:')==0 : - self.textFormula.Enable(True) - self.textFormula.SetEditable(True) - else: - #self.textFormula.Enable(False) - self.textFormula.Enable(True) - self.textFormula.SetEditable(False) - self.textFormula.SetValue(d['formula']) - - # Guess - if d['coeffs'] is None: - self.textGuess.SetValue('') - else: - self.textGuess.SetValue(d['coeffs']) - - # Constants - if d['consts'] is None or len(d['consts'].strip())==0: - self.textConstants.Enable(False) - self.textConstants.SetValue('') - else: - self.textConstants.Enable(True) - self.textConstants.SetValue(d['consts']) - - # Bounds - self.textBounds.Enable(True) - if d['bounds'] is None or len(d['bounds'].strip())==0: - self.textBounds.SetValue('all=(-np.inf, np.inf)') - else: - self.textBounds.SetValue(d['bounds']) - - # Outputs - self.textFormulaNum.SetValue('(Click on Fit)') - self.textCoeffs.SetValue('') - self.textInfo.SetValue('') - - def onCurveFit(self,event=None): - self.x = None - self.y_fit = None - if len(self.parent.plotData)!=1: - Error(self,'Curve fitting tool only works with a single curve. Plot less data.') - return - PD =self.parent.plotData[0] - ax =self.parent.fig.axes[0] - # Restricting data to axes visible bounds on the x axis - xlim= ax.get_xlim() - b=np.logical_and(PD.x>=xlim[0], PD.x<=xlim[1]) - - iModel = self.cbModels.GetSelection() - d = self.Models[iModel] - - if d['id'].find('fitter:')==0 : - sFunc=d['id'] - p0=None - bounds=None - fun_kwargs=extract_key_miscnum(self.textConstants.GetLineText(0).replace('np.inf','inf')) - else: - # Formula - sFunc=d['id'] - if sFunc=='eval:': - sFunc+=self.textFormula.GetLineText(0) - # Bounds - bounds=self.textBounds.GetLineText(0).replace('np.inf','inf') - # Guess - p0=self.textGuess.GetLineText(0).replace('np.inf','inf') - fun_kwargs=extract_key_num(self.textConstants.GetLineText(0).replace('np.inf','inf')) - #print('>>> Model fit sFunc :',sFunc ) - #print('>>> Model fit p0 :',p0 ) - #print('>>> Model fit bounds:',bounds ) - #print('>>> Model fit kwargs:',fun_kwargs) - # Performing fit - y_fit, pfit, fitter = model_fit(sFunc, PD.x[b], PD.y[b], p0=p0, bounds=bounds,**fun_kwargs) - - formatter = lambda x: pretty_num_short(x, digits=3) - formula_num = fitter.formula_num(fmt=formatter) - # Update info - self.textFormulaNum.SetValue(formula_num) - self.textCoeffs.SetValue(', '.join(['{}={:s}'.format(k,formatter(v)) for k,v in fitter.model['coeffs'].items()])) - self.textInfo.SetValue('R2 = {:.3f} '.format(fitter.model['R2'])) - - # Saving - d['formula'] = self.textFormula.GetLineText(0) - d['bounds'] = self.textBounds.GetLineText(0).replace('np.inf','inf') - d['coeffs'] = self.textGuess.GetLineText(0).replace('np.inf','inf') - if d['id'].find('fitter:')==0 : - d['consts'], _ = set_common_keys(d['consts'],fun_kwargs) - else: - d['consts']= self.textConstants.GetLineText(0).replace('np.inf','inf') - - - # Plot - ax=self.parent.fig.axes[0] - ax.plot(PD.x[b],y_fit,'o', ms=4) - self.parent.canvas.draw() - - self.x=PD.x[b] - self.y_fit=y_fit - self.sx=PD.sx - self.sy=PD.sy - - def onClear(self,event=None): - self.parent.load_and_draw() # DATA HAS CHANGED - self.onModelChange() - - def onAdd(self,event=None): - name='model_fit' - if self.x is not None and self.y_fit is not None: - df=pd.DataFrame({self.sx:self.x, self.sy:self.y_fit}) - self.parent.mainframe.load_df(df,name,bAdd=True) - - def onHelp(self,event=None): - Info(self,"""Curve fitting is still in beta. 
- -To perform a curve fit, adjusts the "Inputs section on the left": -- Select a predefined equation to fit, using the scrolldown menu. -- Adjust the initial gues for the parameters (if wanted) -- (Only for few models: set constants values) -- Click on "Fit" - -If you select a user-defined model: -- Equation parameters are specified using curly brackets -- Numpy functions are available using "np" - -Buttons: -- Clear: remove the fit from the plot -- Add: add the fit data to the list of tables (can then be exported) - -""") - - - -TOOLS={ - 'LogDec': LogDecToolPanel, - 'Outlier': OutlierToolPanel, - 'Filter': FilterToolPanel, - 'Resample': ResampleToolPanel, - 'Mask': MaskToolPanel, - 'FASTRadialAverage': RadialToolPanel, - 'CurveFitting': CurveFitToolPanel, -} +from __future__ import absolute_import +import wx +import numpy as np +import pandas as pd +import copy +import platform +from collections import OrderedDict + +# For log dec tool +from .common import CHAR, Error, pretty_num_short, Info +from .plotdata import PlotData +from pydatview.tools.damping import logDecFromDecay +from pydatview.tools.curve_fitting import model_fit, extract_key_miscnum, extract_key_num, MODELS, FITTERS, set_common_keys + + +TOOL_BORDER=15 + +# --------------------------------------------------------------------------------} +# --- Default class for tools +# --------------------------------------------------------------------------------{ +class GUIToolPanel(wx.Panel): + def __init__(self, parent): + super(GUIToolPanel,self).__init__(parent) + self.parent = parent + + def destroy(self,event=None): + self.parent.removeTools() + + def getBtBitmap(self,par,label,Type=None,callback=None,bitmap=False): + if Type is not None: + label=CHAR[Type]+' '+label + + bt=wx.Button(par,wx.ID_ANY, label, style=wx.BU_EXACTFIT) + #try: + # if bitmap is not None: + # bt.SetBitmapLabel(wx.ArtProvider.GetBitmap(bitmap)) #,size=(12,12))) + # else: + #except: + # pass + if callback is not None: + par.Bind(wx.EVT_BUTTON, callback, bt) + return bt + + def getToggleBtBitmap(self,par,label,Type=None,callback=None,bitmap=False): + if Type is not None: + label=CHAR[Type]+' '+label + bt=wx.ToggleButton(par,wx.ID_ANY, label, style=wx.BU_EXACTFIT) + if callback is not None: + par.Bind(wx.EVT_TOGGLEBUTTON, callback, bt) + return bt + + + +# --------------------------------------------------------------------------------} +# --- Log Dec +# --------------------------------------------------------------------------------{ +class LogDecToolPanel(GUIToolPanel): + def __init__(self, parent): + super(LogDecToolPanel,self).__init__(parent) + btClose = self.getBtBitmap(self,'Close' ,'close' ,self.destroy ) + btComp = self.getBtBitmap(self,'Compute','compute',self.onCompute) + self.lb = wx.StaticText( self, -1, ' ') + self.sizer = wx.BoxSizer(wx.HORIZONTAL) + self.sizer.Add(btClose ,0, flag = wx.LEFT|wx.CENTER,border = 1) + self.sizer.Add(btComp ,0, flag = wx.LEFT|wx.CENTER,border = 5) + self.sizer.Add(self.lb ,0, flag = wx.LEFT|wx.CENTER,border = 5) + self.SetSizer(self.sizer) + + def onCompute(self,event=None): + if len(self.parent.plotData)!=1: + Error(self,'Log Dec tool only works with a single plot.') + return + pd =self.parent.plotData[0] + try: + logdec,DampingRatio,T,fn,fd,IPos,INeg,epos,eneg=logDecFromDecay(pd.y,pd.x) + lab='LogDec.: {:.4f} - Damping ratio: {:.4f} - F_n: {:.4f} - F_d: {:.4f} - T:{:.3f}'.format(logdec,DampingRatio,fn,fd,T) + self.lb.SetLabel(lab) + self.sizer.Layout() + ax=self.parent.fig.axes[0] + 
ax.plot(pd.x[IPos],pd.y[IPos],'o') + ax.plot(pd.x[INeg],pd.y[INeg],'o') + ax.plot(pd.x ,epos,'k--') + ax.plot(pd.x ,eneg,'k--') + self.parent.canvas.draw() + except: + self.lb.SetLabel('Failed. The signal needs to look like the decay of a first order system.') + #self.parent.load_and_draw(); # DATA HAS CHANGED + +# --------------------------------------------------------------------------------} +# --- Outliers +# --------------------------------------------------------------------------------{ +class OutlierToolPanel(GUIToolPanel): + """ + A quick and dirty solution to manipulate plotData + I need to think of a better way to do that + """ + def __init__(self, parent): + super(OutlierToolPanel,self).__init__(parent) + self.parent = parent # parent is GUIPlotPanel + + # Setting default states to parent + if 'RemoveOutliers' not in self.parent.plotDataOptions.keys(): + self.parent.plotDataOptions['RemoveOutliers']=False + if 'OutliersMedianDeviation' not in self.parent.plotDataOptions.keys(): + self.parent.plotDataOptions['OutliersMedianDeviation']=5 + + btClose = self.getBtBitmap(self,'Close','close',self.destroy) + self.btComp = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleCompute) + + lb1 = wx.StaticText(self, -1, 'Median deviation:') +# self.tMD = wx.TextCtrl(self, wx.ID_ANY,, size = (30,-1), style=wx.TE_PROCESS_ENTER) + self.tMD = wx.SpinCtrlDouble(self, value='11', size=wx.Size(60,-1)) + self.tMD.SetValue(self.parent.plotDataOptions['OutliersMedianDeviation']) + self.tMD.SetRange(0.0, 1000) + self.tMD.SetIncrement(0.5) + + self.lb = wx.StaticText( self, -1, '') + self.sizer = wx.BoxSizer(wx.HORIZONTAL) + self.sizer.Add(btClose ,0,flag = wx.LEFT|wx.CENTER,border = 1) + self.sizer.Add(self.btComp,0,flag = wx.LEFT|wx.CENTER,border = 5) + self.sizer.Add(lb1 ,0,flag = wx.LEFT|wx.CENTER,border = 5) + self.sizer.Add(self.tMD ,0,flag = wx.LEFT|wx.CENTER,border = 5) + self.sizer.Add(self.lb ,0,flag = wx.LEFT|wx.CENTER,border = 5) + self.SetSizer(self.sizer) + + self.Bind(wx.EVT_SPINCTRLDOUBLE, self.onMDChangeArrow, self.tMD) + self.Bind(wx.EVT_TEXT_ENTER, self.onMDChangeEnter, self.tMD) + + if platform.system()=='Windows': + # See issue https://github.com/wxWidgets/Phoenix/issues/1762 + self.spintxt = self.tMD.Children[0] + assert isinstance(self.spintxt, wx.TextCtrl) + self.spintxt.Bind(wx.EVT_CHAR_HOOK, self.onMDChangeChar) + + self.onToggleCompute(init=True) + + def destroy(self,event=None): + self.parent.plotDataOptions['RemoveOutliers']=False + super(OutlierToolPanel,self).destroy() + + def onToggleCompute(self,event=None, init=False): + self.parent.plotDataOptions['OutliersMedianDeviation'] = float(self.tMD.Value) + + if not init: + self.parent.plotDataOptions['RemoveOutliers']= not self.parent.plotDataOptions['RemoveOutliers'] + + if self.parent.plotDataOptions['RemoveOutliers']: + self.lb.SetLabel('Outliers are now removed on the fly. 
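As a reference for the call above: the log-dec computation lives in pydatview.tools.damping.logDecFromDecay, whose implementation is not part of this hunk. A minimal stand-alone sketch, assuming only the call signature and return order used by LogDecToolPanel.onCompute (the synthetic decay signal and the expected value are illustrative):

    import numpy as np
    from pydatview.tools.damping import logDecFromDecay

    # Synthetic decaying oscillation: f_n = 2 Hz, damping ratio zeta = 2%
    fn, zeta = 2.0, 0.02
    x  = np.linspace(0, 10, 2000)
    fd = fn*np.sqrt(1.0 - zeta**2)                        # damped frequency
    y  = np.exp(-zeta*2*np.pi*fn*x)*np.cos(2*np.pi*fd*x)

    logdec, dampingRatio, T, fn_est, fd_est, IPos, INeg, epos, eneg = logDecFromDecay(y, x)
    # For small damping, logdec ~ 2*pi*zeta/sqrt(1 - zeta**2), i.e. about 0.126 here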
Click "Clear" to stop.') + self.btComp.SetLabel(CHAR['sun']+' Clear') + else: + self.lb.SetLabel('Click on "Apply" to remove outliers on the fly for all new plot.') + self.btComp.SetLabel(CHAR['cloud']+' Apply') + + if not init: + self.parent.load_and_draw() # Data will change + + def onMDChange(self, event=None): + #print(self.tMD.Value) + self.parent.plotDataOptions['OutliersMedianDeviation'] = float(self.tMD.Value) + if self.parent.plotDataOptions['RemoveOutliers']: + self.parent.load_and_draw() # Data will change + + def onMDChangeArrow(self, event): + self.onMDChange() + event.Skip() + + def onMDChangeEnter(self, event): + self.onMDChange() + event.Skip() + + def onMDChangeChar(self, event): + event.Skip() + code = event.GetKeyCode() + if code in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]: + #print(self.spintxt.Value) + self.tMD.SetValue(self.spintxt.Value) + self.onMDChangeEnter(event) + + + +# --------------------------------------------------------------------------------} +# --- Moving Average +# --------------------------------------------------------------------------------{ +class FilterToolPanel(GUIToolPanel): + """ + Moving average/Filters + A quick and dirty solution to manipulate plotData + I need to think of a better way to do that + """ + def __init__(self, parent): + from pydatview.tools.signal import FILTERS + super(FilterToolPanel,self).__init__(parent) + self.parent = parent # parent is GUIPlotPanel + + self._DEFAULT_FILTERS=FILTERS + + # Setting default states to parent + if 'Filter' not in self.parent.plotDataOptions.keys(): + self.parent.plotDataOptions['Filter']=None + self._filterApplied = type(self.parent.plotDataOptions['Filter'])==dict + + + btClose = self.getBtBitmap(self,'Close','close',self.destroy) + self.btClear = self.getBtBitmap(self, 'Clear Plot','sun' , self.onClear) + self.btComp = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleCompute) + self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) + + lb1 = wx.StaticText(self, -1, 'Filter:') + self.cbFilters = wx.ComboBox(self, choices=[filt['name'] for filt in self._DEFAULT_FILTERS], style=wx.CB_READONLY) + self.lbParamName = wx.StaticText(self, -1, ' :') + self.cbFilters.SetSelection(0) + #self.tParam = wx.TextCtrl(self, wx.ID_ANY,, size = (30,-1), style=wx.TE_PROCESS_ENTER) + self.tParam = wx.SpinCtrlDouble(self, value='11', size=wx.Size(60,-1)) + self.lbInfo = wx.StaticText( self, -1, '') + + + # --- Layout + btSizer = wx.FlexGridSizer(rows=3, cols=2, hgap=2, vgap=0) + btSizer.Add(btClose ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btClear ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btComp,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btPlot ,0,flag = wx.ALL|wx.EXPAND, border = 1) + #btSizer.Add(btHelp ,0,flag = wx.ALL|wx.EXPAND, border = 1) + + horzSizer = wx.BoxSizer(wx.HORIZONTAL) + horzSizer.Add(lb1 ,0,flag = wx.LEFT|wx.CENTER,border = 5) + horzSizer.Add(self.cbFilters ,0,flag = wx.LEFT|wx.CENTER,border = 1) + horzSizer.Add(self.lbParamName ,0,flag = wx.LEFT|wx.CENTER,border = 5) + horzSizer.Add(self.tParam ,0,flag = wx.LEFT|wx.CENTER,border = 1) + + vertSizer = wx.BoxSizer(wx.VERTICAL) + vertSizer.Add(self.lbInfo ,0, flag = wx.LEFT ,border = 5) + vertSizer.Add(horzSizer ,1, flag = wx.LEFT|wx.EXPAND,border = 1) + + self.sizer = wx.BoxSizer(wx.HORIZONTAL) + self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 1) + self.sizer.Add(vertSizer ,1, flag = wx.EXPAND|wx.LEFT ,border = 1) + self.SetSizer(self.sizer) + + # --- Events + 
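The actual outlier removal is driven by the 'RemoveOutliers'/'OutliersMedianDeviation' options and happens in the plot-data pipeline, outside this panel. Purely as an illustration of what a median-deviation criterion of this kind typically looks like (this sketch is an assumption, not the routine pyDatView uses):

    import numpy as np

    def reject_outliers_median(y, medianDeviation=5.0):
        # Flag points further than `medianDeviation` median-absolute-deviations
        # from the median and replace them by NaN (illustrative only)
        y   = np.asarray(y, dtype=float)
        dev = np.abs(y - np.median(y))
        mad = np.median(dev)
        if mad == 0:
            return y
        y_clean = y.copy()
        y_clean[dev/mad > medianDeviation] = np.nan
        return y_clean

Here `medianDeviation` plays the role of the "Median deviation" spin control above.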
self.cbFilters.Bind(wx.EVT_COMBOBOX, self.onSelectFilt) + self.Bind(wx.EVT_SPINCTRLDOUBLE, self.onParamChangeArrow, self.tParam) + self.Bind(wx.EVT_TEXT_ENTER, self.onParamChangeEnter, self.tParam) + if platform.system()=='Windows': + # See issue https://github.com/wxWidgets/Phoenix/issues/1762 + self.spintxt = self.tParam.Children[0] + assert isinstance(self.spintxt, wx.TextCtrl) + self.spintxt.Bind(wx.EVT_CHAR_HOOK, self.onParamChangeChar) + + self.onSelectFilt() + self.onToggleCompute(init=True) + + def destroy(self,event=None): + self.parent.plotDataOptions['Filter']=None + super(FilterToolPanel,self).destroy() + + + def onSelectFilt(self, event=None): + """ Select the filter, but does not applied it to the plotData + parentFilt is unchanged + But if the parent already has + """ + iFilt = self.cbFilters.GetSelection() + filt = self._DEFAULT_FILTERS[iFilt] + self.lbParamName.SetLabel(filt['paramName']+':') + self.tParam.SetRange(filt['paramRange'][0], filt['paramRange'][1]) + self.tParam.SetIncrement(filt['increment']) + + parentFilt=self.parent.plotDataOptions['Filter'] + # Value + if type(parentFilt)==dict and parentFilt['name']==filt['name']: + self.tParam.SetValue(parentFilt['param']) + else: + self.tParam.SetValue(filt['param']) + + def onToggleCompute(self, event=None, init=False): + """ + apply Filter based on GUI Data + """ + parentFilt=self.parent.plotDataOptions['Filter'] + if not init: + self._filterApplied = not self._filterApplied + + if self._filterApplied: + self.parent.plotDataOptions['Filter'] =self._GUI2Filt() + #print('Apply', self.parent.plotDataOptions['Filter']) + self.lbInfo.SetLabel( + 'Filter is now applied on the fly. Change parameter live. Click "Clear" to stop. ' + ) + self.btPlot.Enable(False) + self.btClear.Enable(False) + self.btComp.SetLabel(CHAR['sun']+' Clear') + else: + self.parent.plotDataOptions['Filter'] = None + self.lbInfo.SetLabel( + 'Click on "Apply" to set filter on the fly for all plots. '+ + 'Click on "Plot" to try a filter on the current plot.' + ) + self.btPlot.Enable(True) + self.btClear.Enable(True) + self.btComp.SetLabel(CHAR['cloud']+' Apply') + + if not init: + self.parent.load_and_draw() # Data will change + + pass + + def _GUI2Filt(self): + iFilt = self.cbFilters.GetSelection() + filt = self._DEFAULT_FILTERS[iFilt].copy() + try: + filt['param']=np.float(self.spintxt.Value) + except: + print('[WARN] pyDatView: Issue on Mac: GUITools.py/_GUI2Filt. Help needed.') + return filt + + def onPlot(self, event=None): + """ + Overlay on current axis the filter + """ + from pydatview.tools.signal import applyFilter + if len(self.parent.plotData)!=1: + Error(self,'Plotting only works for a single plot. 
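Outside the GUI, the same filtering can be exercised directly with pydatview.tools.signal.FILTERS and applyFilter, using the same dictionary fields ('name', 'param', ...) that _GUI2Filt builds. A small sketch (the noisy test signal and the parameter value 11.0 are arbitrary choices):

    import numpy as np
    from pydatview.tools.signal import FILTERS, applyFilter

    x = np.linspace(0, 10, 1000)
    y = np.sin(2*np.pi*x) + 0.2*np.random.normal(size=x.shape)

    filt = FILTERS[0].copy()     # same entries as listed in the "Filter:" combo box
    filt['param'] = 11.0         # normally read from the spin control
    y_filt = applyFilter(x, y, filt)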
Plot less data.') + return + filt=self._GUI2Filt() + + PD = self.parent.plotData[0] + y_filt = applyFilter(PD.x0, PD.y0, filt) + ax = self.parent.fig.axes[0] + + PD_new = PlotData() + PD_new.fromXY(PD.x0, y_filt) + self.parent.transformPlotData(PD_new) + ax.plot(PD_new.x, PD_new.y, '-') + self.parent.canvas.draw() + + def onClear(self, event): + self.parent.load_and_draw() # Data will change + + def onParamChange(self, event=None): + if self._filterApplied: + self.parent.plotDataOptions['Filter'] =self._GUI2Filt() + #print('OnParamChange', self.parent.plotDataOptions['Filter']) + self.parent.load_and_draw() # Data will change + + def onParamChangeArrow(self, event): + self.onParamChange() + event.Skip() + + def onParamChangeEnter(self, event): + self.onParamChange() + event.Skip() + + def onParamChangeChar(self, event): + event.Skip() + code = event.GetKeyCode() + if code in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]: + #print(self.spintxt.Value) + self.tParam.SetValue(self.spintxt.Value) + self.onParamChangeEnter(event) + + +# --------------------------------------------------------------------------------} +# --- Resample +# --------------------------------------------------------------------------------{ +class ResampleToolPanel(GUIToolPanel): + def __init__(self, parent): + super(ResampleToolPanel,self).__init__(parent) + + # --- Data from other modules + from pydatview.tools.signal import SAMPLERS + self.parent = parent # parent is GUIPlotPanel + self._SAMPLERS=SAMPLERS + # Setting default states to parent + if 'Sampler' not in self.parent.plotDataOptions.keys(): + self.parent.plotDataOptions['Sampler']=None + self._applied = type(self.parent.plotDataOptions['Sampler'])==dict + + + # --- GUI elements + self.btClose = self.getBtBitmap(self, 'Close','close', self.destroy) + self.btAdd = self.getBtBitmap(self, 'Add','add' , self.onAdd) + self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) + self.btClear = self.getBtBitmap(self, 'Clear Plot','sun', self.onClear) + self.btApply = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleApply) + self.btHelp = self.getBtBitmap(self, 'Help','help', self.onHelp) + + #self.lb = wx.StaticText( self, -1, """ Click help """) + self.cbTabs = wx.ComboBox(self, -1, choices=[], style=wx.CB_READONLY) + self.cbMethods = wx.ComboBox(self, -1, choices=[s['name'] for s in self._SAMPLERS], style=wx.CB_READONLY) + + self.lbNewX = wx.StaticText(self, -1, 'New x: ') + self.textNewX = wx.TextCtrl(self, wx.ID_ANY, '', style = wx.TE_PROCESS_ENTER) + self.textOldX = wx.TextCtrl(self, wx.ID_ANY|wx.TE_READONLY) + self.textOldX.Enable(False) + + # --- Layout + btSizer = wx.FlexGridSizer(rows=3, cols=2, hgap=2, vgap=0) + btSizer.Add(self.btClose , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btClear , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btAdd , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btPlot , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btHelp , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btApply , 0, flag = wx.ALL|wx.EXPAND, border = 1) + + msizer = wx.FlexGridSizer(rows=2, cols=4, hgap=2, vgap=0) + msizer.Add(wx.StaticText(self, -1, 'Table:') , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) + msizer.Add(self.cbTabs , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) + msizer.Add(wx.StaticText(self, -1, 'Current x: '), 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) + msizer.Add(self.textOldX , 1, 
wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND, 1) + msizer.Add(wx.StaticText(self, -1, 'Method:') , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) + msizer.Add(self.cbMethods , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) + msizer.Add(self.lbNewX , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) + msizer.Add(self.textNewX , 1, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND, 1) + msizer.AddGrowableCol(3,1) + + self.sizer = wx.BoxSizer(wx.HORIZONTAL) + self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 5) + self.sizer.Add(msizer ,1, flag = wx.LEFT|wx.EXPAND ,border = TOOL_BORDER) + self.SetSizer(self.sizer) + + # --- Events + self.cbTabs.Bind (wx.EVT_COMBOBOX, self.onTabChange) + self.cbMethods.Bind(wx.EVT_COMBOBOX, self.onMethodChange) + self.textNewX.Bind(wx.EVT_TEXT_ENTER,self.onParamChange) + + # --- Init triggers + self.cbMethods.SetSelection(3) + self.onMethodChange(init=True) + self.onToggleApply(init=True) + self.updateTabList() + self.textNewX.SetValue('2') + + def setCurrentX(self, x=None): + if x is None: + x= self.parent.plotData[0].x + if len(x)<50: + s=np.array2string(x, separator=', ') + else: + s =np.array2string(x[[0,1,2,3]], separator=', ') + s+=', ..., ' + s+=np.array2string(x[[-3,-2,-1]], separator=', ') + s=s.replace('[','').replace(']','').replace(' ','').replace(',',', ') + + self.textOldX.SetValue(s) + + def onMethodChange(self, event=None, init=True): + """ Select the method, but does not applied it to the plotData + User data and option is unchanged + But if the user already has some options, they are used + """ + iOpt = self.cbMethods.GetSelection() + opt = self._SAMPLERS[iOpt] + self.lbNewX.SetLabel(opt['paramName']+':') + + parentOpt=self.parent.plotDataOptions['Sampler'] + # Value + if len(self.textNewX.Value)==0: + if type(parentOpt)==dict: + self.textNewX.SetValue(str(parentOpt['param'])[1:-1]) + else: + self.textNewX.SetValue(str(opt['param'])[2:-2]) + self.onParamChange() + + def onParamChange(self, event=None): + if self._applied: + self.parent.plotDataOptions['Sampler'] =self._GUI2Data() + self.parent.load_and_draw() # Data will change + self.setCurrentX() + + def _GUI2Data(self): + iOpt = self.cbMethods.GetSelection() + opt = self._SAMPLERS[iOpt].copy() + s= self.textNewX.Value.strip().replace('[','').replace(']','') + if len(s)>0: + if s.find(','): + opt['param']=np.array(s.split(',')).astype(float) + else: + opt['param']=np.array(s.split('')).astype(float) + return opt + + def onToggleApply(self, event=None, init=False): + """ + apply sampler based on GUI Data + """ + parentFilt=self.parent.plotDataOptions['Sampler'] + if not init: + self._applied = not self._applied + + if self._applied: + self.parent.plotDataOptions['Sampler'] =self._GUI2Data() + #print('Apply', self.parent.plotDataOptions['Sampler']) + #self.lbInfo.SetLabel( + # 'Sampler is now applied on the fly. Change parameter live. Click "Clear" to stop. ' + # ) + self.btPlot.Enable(False) + self.btClear.Enable(False) + self.btApply.SetLabel(CHAR['sun']+' Clear') + else: + self.parent.plotDataOptions['Sampler'] = None + #self.lbInfo.SetLabel( + # 'Click on "Apply" to set filter on the fly for all plots. '+ + # 'Click on "Plot" to try a filter on the current plot.' 
+ # ) + self.btPlot.Enable(True) + self.btClear.Enable(True) + self.btApply.SetLabel(CHAR['cloud']+' Apply') + + if not init: + self.parent.load_and_draw() # Data will change + self.setCurrentX() + + + def onAdd(self,event=None): + iSel = self.cbTabs.GetSelection() + tabList = self.parent.selPanel.tabList + mainframe = self.parent.mainframe + icol, colname = self.parent.selPanel.xCol + print(icol,colname) + opt = self._GUI2Data() + errors=[] + if iSel==0: + dfs, names, errors = tabList.applyResampling(icol, opt, bAdd=True) + mainframe.load_dfs(dfs,names,bAdd=True) + else: + df, name = tabList.get(iSel-1).applyResampling(icol, opt, bAdd=True) + mainframe.load_df(df,name,bAdd=True) + self.updateTabList() + + if len(errors)>0: + raise Exception('Error: The resampling failed on some tables:\n\n'+'\n'.join(errors)) + + def onPlot(self,event=None): + from pydatview.tools.signal import applySampler + if len(self.parent.plotData)!=1: + Error(self,'Plotting only works for a single plot. Plot less data.') + return + opts=self._GUI2Data() + PD = self.parent.plotData[0] + x_new, y_new = applySampler(PD.x0, PD.y0, opts) + ax = self.parent.fig.axes[0] + + PD_new = PlotData() + PD_new.fromXY(x_new, y_new) + self.parent.transformPlotData(PD_new) + ax.plot(PD_new.x, PD_new.y, '-') + self.setCurrentX(x_new) + + self.parent.canvas.draw() + + def onClear(self,event=None): + self.parent.load_and_draw() # Data will change + # Update Current X + self.setCurrentX() + # Update Table list + self.updateTabList() + + + def onTabChange(self,event=None): + #tabList = self.parent.selPanel.tabList + #iSel=self.cbTabs.GetSelection() + pass + + def updateTabList(self,event=None): + tabList = self.parent.selPanel.tabList + tabListNames = ['All opened tables']+tabList.getDisplayTabNames() + try: + iSel=np.max([np.min([self.cbTabs.GetSelection(),len(tabListNames)]),0]) + self.cbTabs.Clear() + [self.cbTabs.Append(tn) for tn in tabListNames] + self.cbTabs.SetSelection(iSel) + except RuntimeError: + pass + + def onHelp(self,event=None): + Info(self,"""Resampling. + +The resampling operation changes the "x" values of a table/plot and +adapt the "y" values accordingly. + +To resample perform the following step: + +- Chose a resampling method: + - replace: specify all the new x-values + - insert : insert a list of x values to the existing ones + - delete : delete a list of x values from the existing ones + - every-n : use every n values + - time-based: downsample using sample averaging or upsample using + linear interpolation, x-axis must already be in seconds + - delta x : specify a delta for uniform spacing of x values + +- Specify the x values as a space or comma separated list + +- Click on one of the following buttons: + - Plot: will display the resampled data on the figure + - Apply: will perform the resampling on the fly for all new plots + - Add: will create new table(s) with resampled values for all + signals. This process might take some time. 
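The resampling handled by this panel is delegated to pydatview.tools.signal.SAMPLERS/applySampler, exactly as in onPlot. A stand-alone sketch (index 3 mirrors the panel's default method selection; the test signal and the parameter value 2 are illustrative):

    import numpy as np
    from pydatview.tools.signal import SAMPLERS, applySampler

    x = np.linspace(0, 10, 1000)
    y = np.sin(2*np.pi*0.5*x)

    opt = SAMPLERS[3].copy()            # same dicts as the "Method:" combo box
    opt['param'] = np.array([2.0])      # same float-array form as _GUI2Data builds
    x_new, y_new = applySampler(x, y, opt)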
+ Select a table or choose all (default) +""") + + + +# --------------------------------------------------------------------------------} +# --- Mask +# --------------------------------------------------------------------------------{ +class MaskToolPanel(GUIToolPanel): + def __init__(self, parent): + super(MaskToolPanel,self).__init__(parent) + + tabList = self.parent.selPanel.tabList + tabListNames = ['All opened tables']+tabList.getDisplayTabNames() + + allMask = tabList.commonMaskString + if len(allMask)==0: + allMask=self.guessMask(tabList) # no known mask, we guess one to help the user + self.applied=False + else: + self.applied=True + + btClose = self.getBtBitmap(self, 'Close','close', self.destroy) + btComp = self.getBtBitmap(self, u'Mask (add)','add' , self.onApply) + if self.applied: + self.btCompMask = self.getToggleBtBitmap(self, 'Clear','sun', self.onToggleApplyMask) + self.btCompMask.SetValue(True) + else: + self.btCompMask = self.getToggleBtBitmap(self, 'Mask','cloud', self.onToggleApplyMask) + + self.lb = wx.StaticText( self, -1, """(Example of mask: "({Time}>100) && ({Time}<50) && ({WS}==5)" or "{Date} > '2018-10-01'")""") + self.cbTabs = wx.ComboBox(self, choices=tabListNames, style=wx.CB_READONLY) + self.cbTabs.SetSelection(0) + + self.textMask = wx.TextCtrl(self, wx.ID_ANY, allMask) + #self.textMask.SetValue('({Time}>100) & ({Time}<400)') + #self.textMask.SetValue("{Date} > '2018-10-01'") + + btSizer = wx.FlexGridSizer(rows=2, cols=2, hgap=2, vgap=0) + btSizer.Add(btClose ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(wx.StaticText(self, -1, '') ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(btComp ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btCompMask ,0,flag = wx.ALL|wx.EXPAND, border = 1) + + row_sizer = wx.BoxSizer(wx.HORIZONTAL) + row_sizer.Add(wx.StaticText(self, -1, 'Tab:') , 0, wx.CENTER|wx.LEFT, 0) + row_sizer.Add(self.cbTabs , 0, wx.CENTER|wx.LEFT, 2) + row_sizer.Add(wx.StaticText(self, -1, 'Mask:'), 0, wx.CENTER|wx.LEFT, 5) + row_sizer.Add(self.textMask, 1, wx.CENTER|wx.LEFT|wx.EXPAND, 5) + + vert_sizer = wx.BoxSizer(wx.VERTICAL) + vert_sizer.Add(self.lb ,0, flag = wx.ALIGN_LEFT|wx.TOP|wx.BOTTOM, border = 5) + vert_sizer.Add(row_sizer ,1, flag = wx.EXPAND|wx.ALIGN_LEFT|wx.TOP|wx.BOTTOM, border = 5) + + self.sizer = wx.BoxSizer(wx.HORIZONTAL) + self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 5) + self.sizer.Add(vert_sizer ,1, flag = wx.LEFT|wx.EXPAND ,border = TOOL_BORDER) + self.SetSizer(self.sizer) + self.Bind(wx.EVT_COMBOBOX, self.onTabChange, self.cbTabs ) + + def onTabChange(self,event=None): + tabList = self.parent.selPanel.tabList + iSel=self.cbTabs.GetSelection() + if iSel==0: + maskString = tabList.commonMaskString + else: + maskString= tabList.get(iSel-1).maskString + if len(maskString)>0: + self.textMask.SetValue(maskString) + #else: + # self.textMask.SetValue('') # no known mask + # self.textMask.SetValue(self.guessMask) # no known mask + + def guessMask(self,tabList): + cols=[c.lower() for c in tabList.get(0).columns_clean] + if 'time' in cols: + return '{Time} > 100' + elif 'date' in cols: + return "{Date} > '2017-01-01" + else: + return '' + + def onClear(self,event=None): + iSel = self.cbTabs.GetSelection() + tabList = self.parent.selPanel.tabList + mainframe = self.parent.mainframe + if iSel==0: + tabList.clearCommonMask() + else: + tabList.get(iSel-1).clearMask() + + mainframe.redraw() + self.onTabChange() + + def onToggleApplyMask(self,event=None): + self.applied = not self.applied + if self.applied: + 
self.btCompMask.SetLabel(CHAR['sun']+' Clear') + else: + self.btCompMask.SetLabel(CHAR['cloud']+' Mask') + + if self.applied: + self.onApply(event,bAdd=False) + else: + self.onClear() + + def onApply(self,event=None,bAdd=True): + maskString = self.textMask.GetLineText(0) + iSel = self.cbTabs.GetSelection() + tabList = self.parent.selPanel.tabList + mainframe = self.parent.mainframe + if iSel==0: + dfs, names, errors = tabList.applyCommonMaskString(maskString, bAdd=bAdd) + if bAdd: + mainframe.load_dfs(dfs,names,bAdd=bAdd) + else: + mainframe.redraw() + if len(errors)>0: + raise Exception('Error: The mask failed on some tables:\n\n'+'\n'.join(errors)) + else: + dfs, name = tabList.get(iSel-1).applyMaskString(maskString, bAdd=bAdd) + if bAdd: + mainframe.load_df(df,name,bAdd=bAdd) + else: + mainframe.redraw() + self.updateTabList() + + + def updateTabList(self,event=None): + tabList = self.parent.selPanel.tabList + tabListNames = ['All opened tables']+tabList.getDisplayTabNames() + try: + iSel=np.min([self.cbTabs.GetSelection(),len(tabListNames)]) + self.cbTabs.Clear() + [self.cbTabs.Append(tn) for tn in tabListNames] + self.cbTabs.SetSelection(iSel) + except RuntimeError: + pass + +# --------------------------------------------------------------------------------} +# --- Radial +# --------------------------------------------------------------------------------{ +sAVG_METHODS = ['Last `n` seconds','Last `n` periods'] +AVG_METHODS = ['constantwindow','periods'] + +class RadialToolPanel(GUIToolPanel): + def __init__(self, parent): + super(RadialToolPanel,self).__init__(parent) + + tabList = self.parent.selPanel.tabList + tabListNames = ['All opened tables']+tabList.getDisplayTabNames() + + btClose = self.getBtBitmap(self,'Close' ,'close' , self.destroy) + btComp = self.getBtBitmap(self,'Average','compute', self.onApply) # ART_PLUS + + self.lb = wx.StaticText( self, -1, """Select tables, averaging method and average parameter (`Period` methods uses the `azimuth` signal) """) + self.cbTabs = wx.ComboBox(self, choices=tabListNames, style=wx.CB_READONLY) + self.cbMethod = wx.ComboBox(self, choices=sAVG_METHODS, style=wx.CB_READONLY) + self.cbTabs.SetSelection(0) + self.cbMethod.SetSelection(0) + + self.textAverageParam = wx.TextCtrl(self, wx.ID_ANY, '2',size = (36,-1), style=wx.TE_PROCESS_ENTER) + + btSizer = wx.FlexGridSizer(rows=2, cols=1, hgap=0, vgap=0) + #btSizer = wx.BoxSizer(wx.VERTICAL) + btSizer.Add(btClose ,0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(btComp ,0, flag = wx.ALL|wx.EXPAND, border = 1) + + row_sizer = wx.BoxSizer(wx.HORIZONTAL) + row_sizer.Add(wx.StaticText(self, -1, 'Tab:') , 0, wx.CENTER|wx.LEFT, 0) + row_sizer.Add(self.cbTabs , 0, wx.CENTER|wx.LEFT, 2) + row_sizer.Add(wx.StaticText(self, -1, 'Method:'), 0, wx.CENTER|wx.LEFT, 5) + row_sizer.Add(self.cbMethod , 0, wx.CENTER|wx.LEFT, 2) + row_sizer.Add(wx.StaticText(self, -1, 'Param:') , 0, wx.CENTER|wx.LEFT, 5) + row_sizer.Add(self.textAverageParam , 0, wx.CENTER|wx.LEFT|wx.RIGHT| wx.EXPAND, 2) + + vert_sizer = wx.BoxSizer(wx.VERTICAL) + vert_sizer.Add(self.lb ,0, flag =wx.ALIGN_LEFT|wx.TOP|wx.BOTTOM,border = 5) + vert_sizer.Add(row_sizer ,0, flag =wx.ALIGN_LEFT|wx.TOP|wx.BOTTOM,border = 5) + + self.sizer = wx.BoxSizer(wx.HORIZONTAL) + self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 5) + self.sizer.Add(vert_sizer ,0, flag = wx.LEFT|wx.EXPAND,border = TOOL_BORDER) + self.SetSizer(self.sizer) + self.Bind(wx.EVT_COMBOBOX, self.onTabChange, self.cbTabs ) + + def onTabChange(self,event=None): + tabList = 
self.parent.selPanel.tabList + + def onApply(self,event=None): + try: + avgParam = float(self.textAverageParam.GetLineText(0)) + except: + raise Exception('Error: the averaging parameter needs to be an integer or a float') + iSel = self.cbTabs.GetSelection() + avgMethod = AVG_METHODS[self.cbMethod.GetSelection()] + tabList = self.parent.selPanel.tabList + mainframe = self.parent.mainframe + if iSel==0: + dfs, names, errors = tabList.radialAvg(avgMethod,avgParam) + mainframe.load_dfs(dfs,names,bAdd=True) + if len(errors)>0: + raise Exception('Error: The mask failed on some tables:\n\n'+'\n'.join(errors)) + else: + dfs, names = tabList.get(iSel-1).radialAvg(avgMethod,avgParam) + mainframe.load_dfs(dfs,names,bAdd=True) + + self.updateTabList() + + def updateTabList(self,event=None): + tabList = self.parent.selPanel.tabList + tabListNames = ['All opened tables']+tabList.getDisplayTabNames() + iSel=np.min([self.cbTabs.GetSelection(),len(tabListNames)]) + self.cbTabs.Clear() + [self.cbTabs.Append(tn) for tn in tabListNames] + self.cbTabs.SetSelection(iSel) + + +# --------------------------------------------------------------------------------} +# --- Curve Fitting +# --------------------------------------------------------------------------------{ +MODELS_EXAMPLE =[ + {'label':'User defined model', 'id':'eval:', + 'formula':'{a}*x**2 + {b}', + 'coeffs':None, + 'consts':None, + 'bounds':None }, + ] +MODELS_EXTRA =[ +# {'label':'Exponential decay', 'id':'eval:', +# 'formula':'{A}*exp(-{k}*x)+{B}', +# 'coeffs' :'k=1, A=1, B=0', +# 'consts' :None, +# 'bounds' :None}, +] + +class CurveFitToolPanel(GUIToolPanel): + def __init__(self, parent): + super(CurveFitToolPanel,self).__init__(parent) + + # Data + self.x = None + self.y_fit = None + + # GUI Objecst + btClose = self.getBtBitmap(self, 'Close','close', self.destroy) + btClear = self.getBtBitmap(self, 'Clear','sun', self.onClear) # DELETE + btAdd = self.getBtBitmap(self, 'Add','add' , self.onAdd) + btCompFit = self.getBtBitmap(self, 'Fit','check', self.onCurveFit) + btHelp = self.getBtBitmap(self, 'Help','help', self.onHelp) + + boldFont = self.GetFont().Bold() + lbOutputs = wx.StaticText(self, -1, 'Outputs') + lbInputs = wx.StaticText(self, -1, 'Inputs ') + lbOutputs.SetFont(boldFont) + lbInputs.SetFont(boldFont) + + self.textFormula = wx.TextCtrl(self, wx.ID_ANY, '') + self.textGuess = wx.TextCtrl(self, wx.ID_ANY, '') + self.textBounds = wx.TextCtrl(self, wx.ID_ANY, '') + self.textConstants = wx.TextCtrl(self, wx.ID_ANY, '') + + self.textFormulaNum = wx.TextCtrl(self, wx.ID_ANY, '', style=wx.TE_READONLY) + self.textCoeffs = wx.TextCtrl(self, wx.ID_ANY, '', style=wx.TE_READONLY) + self.textInfo = wx.TextCtrl(self, wx.ID_ANY, '', style=wx.TE_READONLY) + + + self.Models=copy.deepcopy(MODELS_EXAMPLE) + copy.deepcopy(FITTERS) + copy.deepcopy(MODELS) + copy.deepcopy(MODELS_EXTRA) + sModels=[d['label'] for d in self.Models] + + + self.cbModels = wx.ComboBox(self, choices=sModels, style=wx.CB_READONLY) + self.cbModels.SetSelection(0) + + btSizer = wx.FlexGridSizer(rows=3, cols=2, hgap=2, vgap=0) + btSizer.Add(btClose ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(btClear ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(btAdd ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(btCompFit ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(btHelp ,0,flag = wx.ALL|wx.EXPAND, border = 1) + + inputSizer = wx.FlexGridSizer(rows=5, cols=2, hgap=0, vgap=0) + inputSizer.Add(lbInputs ,0, 
flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + inputSizer.Add(self.cbModels ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) + inputSizer.Add(wx.StaticText(self, -1, 'Formula:') ,0, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + inputSizer.Add(self.textFormula ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) + inputSizer.Add(wx.StaticText(self, -1, 'Guess:') ,0, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + inputSizer.Add(self.textGuess ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) + inputSizer.Add(wx.StaticText(self, -1, 'Bounds:') ,0, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + inputSizer.Add(self.textBounds ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) + inputSizer.Add(wx.StaticText(self, -1, 'Constants:'),0, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + inputSizer.Add(self.textConstants ,1, flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) + inputSizer.AddGrowableCol(1,1) + + outputSizer = wx.FlexGridSizer(rows=5, cols=2, hgap=0, vgap=0) + outputSizer.Add(lbOutputs ,0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + outputSizer.Add(wx.StaticText(self, -1, '') ,0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + outputSizer.Add(wx.StaticText(self, -1, 'Formula:'),0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + outputSizer.Add(self.textFormulaNum ,1 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) + outputSizer.Add(wx.StaticText(self, -1, 'Parameters:') ,0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + outputSizer.Add(self.textCoeffs ,1 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) + outputSizer.Add(wx.StaticText(self, -1, 'Accuracy:') ,0 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM,border = 1) + outputSizer.Add(self.textInfo ,1 , flag=wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.EXPAND,border = 1) + outputSizer.AddGrowableCol(1,0.5) + + horzSizer = wx.BoxSizer(wx.HORIZONTAL) + horzSizer.Add(inputSizer ,1.0, flag = wx.LEFT|wx.EXPAND,border = 2) + horzSizer.Add(outputSizer ,1.0, flag = wx.LEFT|wx.EXPAND,border = 9) + + vertSizer = wx.BoxSizer(wx.VERTICAL) +# vertSizer.Add(self.lbHelp ,0, flag = wx.LEFT ,border = 1) + vertSizer.Add(horzSizer ,1, flag = wx.LEFT|wx.EXPAND,border = 1) + + self.sizer = wx.BoxSizer(wx.HORIZONTAL) + self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 1) +# self.sizer.Add(vertSizerCB ,0, flag = wx.LEFT ,border = 1) + self.sizer.Add(vertSizer ,1, flag = wx.EXPAND|wx.LEFT ,border = 1) + self.SetSizer(self.sizer) + + self.Bind(wx.EVT_COMBOBOX, self.onModelChange, self.cbModels) + + self.onModelChange() + + def onModelChange(self,event=None): + iModel = self.cbModels.GetSelection() + d = self.Models[iModel] + self.textFormula.SetEditable(True) + + if d['id'].find('fitter:')==0 : + self.textGuess.Enable(False) + self.textGuess.SetValue('') + self.textFormula.Enable(False) + self.textFormula.SetValue(d['formula']) + self.textBounds.Enable(False) + self.textBounds.SetValue('') + self.textConstants.Enable(True) + # NOTE: conversion to string works with list, and tuples, not numpy array + val = ', '.join([k+'='+str(v) for 
k,v in d['consts'].items()]) + self.textConstants.SetValue(val) + else: + # Formula + if d['id'].find('eval:')==0 : + self.textFormula.Enable(True) + self.textFormula.SetEditable(True) + else: + #self.textFormula.Enable(False) + self.textFormula.Enable(True) + self.textFormula.SetEditable(False) + self.textFormula.SetValue(d['formula']) + + # Guess + if d['coeffs'] is None: + self.textGuess.SetValue('') + else: + self.textGuess.SetValue(d['coeffs']) + + # Constants + if d['consts'] is None or len(d['consts'].strip())==0: + self.textConstants.Enable(False) + self.textConstants.SetValue('') + else: + self.textConstants.Enable(True) + self.textConstants.SetValue(d['consts']) + + # Bounds + self.textBounds.Enable(True) + if d['bounds'] is None or len(d['bounds'].strip())==0: + self.textBounds.SetValue('all=(-np.inf, np.inf)') + else: + self.textBounds.SetValue(d['bounds']) + + # Outputs + self.textFormulaNum.SetValue('(Click on Fit)') + self.textCoeffs.SetValue('') + self.textInfo.SetValue('') + + def onCurveFit(self,event=None): + self.x = None + self.y_fit = None + if len(self.parent.plotData)!=1: + Error(self,'Curve fitting tool only works with a single curve. Plot less data.') + return + PD =self.parent.plotData[0] + ax =self.parent.fig.axes[0] + # Restricting data to axes visible bounds on the x axis + xlim= ax.get_xlim() + b=np.logical_and(PD.x>=xlim[0], PD.x<=xlim[1]) + + iModel = self.cbModels.GetSelection() + d = self.Models[iModel] + + if d['id'].find('fitter:')==0 : + sFunc=d['id'] + p0=None + bounds=None + fun_kwargs=extract_key_miscnum(self.textConstants.GetLineText(0).replace('np.inf','inf')) + else: + # Formula + sFunc=d['id'] + if sFunc=='eval:': + sFunc+=self.textFormula.GetLineText(0) + # Bounds + bounds=self.textBounds.GetLineText(0).replace('np.inf','inf') + # Guess + p0=self.textGuess.GetLineText(0).replace('np.inf','inf') + fun_kwargs=extract_key_num(self.textConstants.GetLineText(0).replace('np.inf','inf')) + #print('>>> Model fit sFunc :',sFunc ) + #print('>>> Model fit p0 :',p0 ) + #print('>>> Model fit bounds:',bounds ) + #print('>>> Model fit kwargs:',fun_kwargs) + # Performing fit + y_fit, pfit, fitter = model_fit(sFunc, PD.x[b], PD.y[b], p0=p0, bounds=bounds,**fun_kwargs) + + formatter = lambda x: pretty_num_short(x, digits=3) + formula_num = fitter.formula_num(fmt=formatter) + # Update info + self.textFormulaNum.SetValue(formula_num) + self.textCoeffs.SetValue(', '.join(['{}={:s}'.format(k,formatter(v)) for k,v in fitter.model['coeffs'].items()])) + self.textInfo.SetValue('R2 = {:.3f} '.format(fitter.model['R2'])) + + # Saving + d['formula'] = self.textFormula.GetLineText(0) + d['bounds'] = self.textBounds.GetLineText(0).replace('np.inf','inf') + d['coeffs'] = self.textGuess.GetLineText(0).replace('np.inf','inf') + if d['id'].find('fitter:')==0 : + d['consts'], _ = set_common_keys(d['consts'],fun_kwargs) + else: + d['consts']= self.textConstants.GetLineText(0).replace('np.inf','inf') + + + # Plot + ax=self.parent.fig.axes[0] + ax.plot(PD.x[b],y_fit,'o', ms=4) + self.parent.canvas.draw() + + self.x=PD.x[b] + self.y_fit=y_fit + self.sx=PD.sx + self.sy=PD.sy + + def onClear(self,event=None): + self.parent.load_and_draw() # DATA HAS CHANGED + self.onModelChange() + + def onAdd(self,event=None): + name='model_fit' + if self.x is not None and self.y_fit is not None: + df=pd.DataFrame({self.sx:self.x, self.sy:self.y_fit}) + self.parent.mainframe.load_df(df,name,bAdd=True) + + def onHelp(self,event=None): + Info(self,"""Curve fitting is still in beta. 
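The fit itself is a single call to pydatview.tools.curve_fitting.model_fit, exactly as in onCurveFit above. A self-contained sketch with the GUI strings filled in by hand (the quadratic test data, the noise level and the initial guess are made up for illustration):

    import numpy as np
    from pydatview.tools.curve_fitting import model_fit

    x = np.linspace(0, 10, 100)
    y = 3.0*x**2 + 1.0 + np.random.normal(scale=2.0, size=x.shape)

    y_fit, pfit, fitter = model_fit('eval:{a}*x**2 + {b}', x, y,
                                    p0='a=1, b=0', bounds='all=(-inf, inf)')
    print(fitter.model['coeffs'])   # fitted a and b
    print(fitter.model['R2'])       # same quantity shown in the "Accuracy" field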
+ +To perform a curve fit, adjusts the "Inputs section on the left": +- Select a predefined equation to fit, using the scrolldown menu. +- Adjust the initial gues for the parameters (if wanted) +- (Only for few models: set constants values) +- Click on "Fit" + +If you select a user-defined model: +- Equation parameters are specified using curly brackets +- Numpy functions are available using "np" + +Buttons: +- Clear: remove the fit from the plot +- Add: add the fit data to the list of tables (can then be exported) + +""") + + + +TOOLS={ + 'LogDec': LogDecToolPanel, + 'Outlier': OutlierToolPanel, + 'Filter': FilterToolPanel, + 'Resample': ResampleToolPanel, + 'Mask': MaskToolPanel, + 'FASTRadialAverage': RadialToolPanel, + 'CurveFitting': CurveFitToolPanel, +} diff --git a/pydatview/Tables.py b/pydatview/Tables.py index adc4b40..0c8732c 100644 --- a/pydatview/Tables.py +++ b/pydatview/Tables.py @@ -1,627 +1,627 @@ -import numpy as np -import os.path -from dateutil import parser -import pandas as pd -import pydatview.fast.fastlib as fastlib -import pydatview.fast.fastfarm as fastfarm -try: - from .common import no_unit, ellude_common, getDt -except: - from common import no_unit, ellude_common, getDt -try: - import weio # File Formats and File Readers -except: - print('') - print('Error: the python package `weio` was not imported successfully.\n') - print('Most likely the submodule `weio` was not cloned with `pyDatView`') - print('Type the following command to retrieve it:\n') - print(' git submodule update --init --recursive\n') - print('Alternatively re-clone this repository into a separate folder:\n') - print(' git clone --recurse-submodules https://github.com/ebranlard/pyDatView\n') - sys.exit(-1) - - - -# --------------------------------------------------------------------------------} -# --- TabList -# --------------------------------------------------------------------------------{ -class TableList(object): # todo inherit list - def __init__(self,tabs=[]): - self._tabs=tabs - self.Naming='Ellude' - - def append(self,t): - if isinstance(t,list): - self._tabs += t - else: - self._tabs += [t] - - - def from_dataframes(self, dataframes=[], names=[], bAdd=False): - if not bAdd: - self.clean() # TODO figure it out - # Returning a list of tables - for df,name in zip(dataframes, names): - if df is not None: - self.append(Table(data=df, name=name)) - - def load_tables_from_files(self, filenames=[], fileformat=None, bAdd=False): - """ load multiple files, only trigger the plot at the end """ - if not bAdd: - self.clean() # TODO figure it out - warnList=[] - for f in filenames: - if f in self.unique_filenames: - warnList.append('Warn: Cannot add a file already opened ' + f) - elif len(f)==0: - pass - # warn+= 'Warn: an empty filename was skipped' +'\n' - else: - tabs, warnloc = self._load_file_tabs(f,fileformat=fileformat) - if len(warnloc)>0: - warnList.append(warnloc) - self.append(tabs) - - return warnList - - def _load_file_tabs(self,filename,fileformat=None): - """ load a single file, adds table, and potentially trigger plotting """ - # Returning a list of tables - tabs=[] - warn='' - if not os.path.isfile(filename): - warn = 'Error: File not found: `'+filename+'`\n' - return tabs, warn - try: - F = weio.read(filename,fileformat = fileformat) - dfs = F.toDataFrame() - except weio.FileNotFoundError as e: - warn = 'Error: A file was not found!\n\n While opening:\n\n {}\n\n the following file was not found:\n\n {}\n'.format(filename, e.filename) - except IOError: - warn = 'Error: IO Error 
thrown while opening file: '+filename+'\n' - except MemoryError: - warn='Error: Insufficient memory!\n\nFile: '+filename+'\n\nTry closing and reopening the program, or use a 64 bit version of this program (i.e. of python).\n' - except weio.EmptyFileError: - warn='Error: File empty!\n\nFile is empty: '+filename+'\n\nOpen a different file.\n' - except weio.FormatNotDetectedError: - warn='Error: File format not detected!\n\nFile: '+filename+'\n\nUse an explicit file-format from the list\n' - except weio.WrongFormatError as e: - warn='Error: Wrong file format!\n\nFile: '+filename+'\n\n' \ - 'The file parser for the selected format failed to open the file.\n\n'+ \ - 'The reported error was:\n'+e.args[0]+'\n\n' + \ - 'Double-check your file format and report this error if you think it''s a bug.\n' - except weio.BrokenFormatError as e: - warn = 'Error: Inconsistency in the file format!\n\nFile: '+filename+'\n\n' \ - 'The reported error was:\n\n'+e.args[0]+'\n\n' + \ - 'Double-check your file format and report this error if you think it''s a bug.' - except: - raise - if len(warn)>0: - return tabs, warn - - if dfs is None: - pass - elif not isinstance(dfs,dict): - if len(dfs)>0: - tabs=[Table(data=dfs, filename=filename, fileformat=F.formatName())] - else: - for k in list(dfs.keys()): - if len(dfs[k])>0: - tabs.append(Table(data=dfs[k], name=str(k), filename=filename, fileformat=F.formatName())) - if len(tabs)<=0: - warn='Warn: No dataframe found in file: '+filename+'\n' - return tabs, warn - - def getTabs(self): - # TODO remove me later - return self._tabs - - def len(self): - return len(self._tabs) - - def haveSameColumns(self,I=None): - if I is None: - I=list(range(len(self._tabs))) - A=[len(self._tabs[i].columns)==len(self._tabs[I[0]].columns) for i in I ] - if all(A): - B=[self._tabs[i].columns_clean==self._tabs[I[0]].columns_clean for i in I] #list comparison - return all(B) - else: - return False - - def renameTable(self, iTab, newName): - oldName = self._tabs[iTab].name - if newName in [t.name for t in self._tabs]: - raise Exception('Error: This table already exist, choose a different name.') - # Renaming table - self._tabs[iTab].rename(newName) - return oldName - - def sort(self, method='byName'): - if method=='byName': - tabnames_display=self.getDisplayTabNames() - self._tabs = [t for _,t in sorted(zip(tabnames_display,self._tabs))] - else: - raise Exception('Sorting method unknown: `{}`'.format(method)) - - def deleteTabs(self, I): - self._tabs = [t for i,t in enumerate(self._tabs) if i not in I] - - def setActiveNames(self,names): - for t,tn in zip(self._tabs,names): - t.active_name=tn - - def setNaming(self,naming): - self.Naming=naming - - def getDisplayTabNames(self): - if self.Naming=='Ellude': - # Temporary hack, using last names if all last names are unique - names = [t.raw_name for t in self._tabs] - last_names=[n.split('|')[-1] for n in names] - if len(np.unique(last_names)) == len(names): - return ellude_common(last_names) - else: - return ellude_common(names) - elif self.Naming=='FileNames': - return [os.path.splitext(os.path.basename(t.filename))[0] for t in self._tabs] - else: - raise Exception('Table naming unknown: {}'.format(self.Naming)) - - @property - def tabNames(self): - return [t.name for t in self._tabs] - - @property - def filenames(self): - return [t.filename for t in self._tabs] - - @property - def unique_filenames(self): - return list(set([t.filename for t in self._tabs])) - - def clean(self): - del self._tabs - self._tabs=[] - - def __repr__(self): - return 
'\n'.join([t.__repr__() for t in self._tabs]) - - # --- Mask related - @property - def maskStrings(self): - return [t.maskString for t in self._tabs] - - @property - def commonMaskString(self): - maskStrings=set(self.maskStrings) - if len(maskStrings) == 1: - return next(iter(maskStrings)) - else: - return '' - - def clearCommonMask(self): - for t in self._tabs: - t.clearMask() - - def applyCommonMaskString(self,maskString,bAdd=True): - dfs_new = [] - names_new = [] - errors=[] - for i,t in enumerate(self._tabs): - try: - df_new, name_new = t.applyMaskString(maskString, bAdd=bAdd) - if df_new is not None: - # we don't append when string is empty - dfs_new.append(df_new) - names_new.append(name_new) - except: - errors.append('Mask failed for table: '+t.active_name) # TODO - - return dfs_new, names_new, errors - - def applyResampling(self,iCol,sampDict,bAdd=True): - dfs_new = [] - names_new = [] - errors=[] - for i,t in enumerate(self._tabs): -# try: - df_new, name_new = t.applyResampling(iCol,sampDict, bAdd=bAdd) - if df_new is not None: - # we don't append when string is empty - dfs_new.append(df_new) - names_new.append(name_new) -# except: -# errors.append('Resampling failed for table: '+t.active_name) # TODO - - return dfs_new, names_new, errors - - - - - # --- Radial average related - def radialAvg(self,avgMethod,avgParam): - dfs_new = [] - names_new = [] - errors=[] - for i,t in enumerate(self._tabs): - dfs, names = t.radialAvg(avgMethod,avgParam) - for df,n in zip(dfs,names): - if df is not None: - dfs_new.append(df) - names_new.append(n) - return dfs_new, names_new, errors - - - # --- Element--related functions - def get(self,i): - return self._tabs[i] - - - -# --------------------------------------------------------------------------------} -# --- Table -# --------------------------------------------------------------------------------{ -# TODO sort out the naming -# -# Main naming concepts: -# name : -# active_name : -# raw_name : -# filename : -class Table(object): - def __init__(self,data=None,name='',filename='',columns=[],fileformat=''): - # Default init - self.maskString='' - self.mask=None - - self.filename = filename - self.fileformat = fileformat - self.formulas = [] - - if not isinstance(data,pd.DataFrame): - # ndarray?? 
- raise NotImplementedError('Tables that are not dataframe not implemented.') - else: - # --- Pandas DataFrame - self.data = data - self.columns = self.columnsFromDF(data) - # --- Trying to figure out how to name this table - if name is None or len(str(name))==0: - if data.columns.name is not None: - name=data.columns.name - - self.setupName(name=str(name)) - - self.convertTimeColumns() - - - def setupName(self,name=''): - # Creates a "codename": path | basename | name | ext - splits=[] - ext='' - if len(self.filename)>0: - base_dir = os.path.dirname(self.filename) - if len(base_dir)==0: - base_dir= os.getcwd() - self.filename=os.path.join(base_dir,self.filename) - splits.append(base_dir.replace('/','|').replace('\\','|')) - basename,ext=os.path.splitext(os.path.basename(self.filename)) - if len(basename)>0: - splits.append(basename) - if name is not None and len(name)>0: - splits.append(name) - #if len(ext)>0: - # splits.append(ext) - self.extension=ext - name='|'.join(splits) - if len(name)==0: - name='default' - self.name=name - self.active_name=self.name - - - def __repr__(self): - return 'Tab {} ({}x{}) (raw: {}, active: {}, file: {})'.format(self.name,self.nCols,self.nRows,self.raw_name, self.active_name,self.filename) - - def columnsFromDF(self,df): - return [s.replace('_',' ') for s in df.columns.values.astype(str)] - - - def clearMask(self): - self.maskString='' - self.mask=None - - def addLabelToName(self,label): - print('raw_name',self.raw_name) - raw_name=self.raw_name - sp=raw_name.split('|') - print(sp) - - def applyMaskString(self,maskString,bAdd=True): - df = self.data - Index = np.array(range(df.shape[0])) - sMask=maskString.replace('{Index}','Index') - for i,c in enumerate(self.columns): - c_no_unit = no_unit(c).strip() - c_in_df = df.columns[i] - # TODO sort out the mess with asarray (introduced to have and/or - # as array won't work with date comparison - # NOTE: using iloc to avoid duplicates column issue - if isinstance(df.iloc[0,i], pd._libs.tslibs.timestamps.Timestamp): - sMask=sMask.replace('{'+c_no_unit+'}','df[\''+c_in_df+'\']') - else: - sMask=sMask.replace('{'+c_no_unit+'}','np.asarray(df[\''+c_in_df+'\'])') - df_new = None - name_new = None - if len(sMask.strip())>0 and sMask.strip().lower()!='no mask': - try: - mask = np.asarray(eval(sMask)) - if bAdd: - df_new = df[mask] - name_new=self.raw_name+'_masked' - else: - self.mask=mask - self.maskString=maskString - except: - raise Exception('Error: The mask failed for table: '+self.name) - return df_new, name_new - - def applyResampling(self,iCol,sampDict,bAdd=True): - from pydatview.tools.signal import applySamplerDF - if iCol==0: - raise Exception('Cannot resample based on index') - colName=self.data.columns[iCol-1] - df_new =applySamplerDF(self.data, colName, sampDict=sampDict) - df_new - if bAdd: - name_new=self.raw_name+'_resampled' - else: - name_new=None - self.data=df_new - return df_new, name_new - - - def radialAvg(self,avgMethod, avgParam): - df = self.data - base,out_ext = os.path.splitext(self.filename) - - # --- Detect if it's a FAST Farm file - sCols = ''.join(df.columns) - if sCols.find('WkDf')>1 or sCols.find('CtT')>0: - # --- FAST FARM files - Files=[base+ext for ext in ['.fstf','.FSTF','.Fstf','.fmas','.FMAS','.Fmas'] if os.path.exists(base+ext)] - if len(Files)==0: - fst_in=None - #raise Exception('Error: No .fstf file found with name: '+base+'.fstf') - else: - fst_in=Files[0] - - dfRad,_,dfDiam = fastfarm.spanwisePostProFF(fst_in,avgMethod=avgMethod,avgParam=avgParam,D=1,df=df) - dfs_new = 
[dfRad,dfDiam] - names_new=[self.raw_name+'_rad',self.raw_name+'_diam'] - else: - # --- FAST files - - # HACK for AD file to find the right .fst file - iDotAD=base.lower().find('.ad') - if iDotAD>1: - base=base[:iDotAD] - # - Files=[base+ext for ext in ['.fst','.FST','.Fst','.dvr','.Dvr','.DVR'] if os.path.exists(base+ext)] - if len(Files)==0: - fst_in=None - #raise Exception('Error: No .fst file found with name: '+base+'.fst') - else: - fst_in=Files[0] - - - dfRadED, dfRadAD, dfRadBD= fastlib.spanwisePostPro(fst_in, avgMethod=avgMethod, avgParam=avgParam, out_ext=out_ext, df = self.data) - dfs_new = [dfRadAD, dfRadED, dfRadBD] - names_new=[self.raw_name+'_AD', self.raw_name+'_ED', self.raw_name+'_BD'] - return dfs_new, names_new - - def convertTimeColumns(self): - if len(self.data)>0: - for i,c in enumerate(self.data.columns.values): - y = self.data.iloc[:,i] - if y.dtype == np.object: - if isinstance(y.values[0], str): - # tring to convert to date - try: - parser.parse(y.values[0]) - isDate=True - except: - if y.values[0]=='NaT': - isDate=True - else: - isDate=False - if isDate: - try: - self.data[c]=pd.to_datetime(self.data[c].values).to_pydatetime() - print('Column {} converted to datetime'.format(c)) - except: - # Happens if values are e.g. "Monday, Tuesday" - print('Conversion to datetime failed, column {} inferred as string'.format(c)) - else: - print('Column {} inferred as string'.format(c)) - elif isinstance(y.values[0], (float, int)): - try: - self.data[c]=self.data[c].astype(float) - print('Column {} converted to float (likely nan)'.format(c)) - except: - self.data[c]=self.data[c].astype(str) - print('Column {} inferred and converted to string'.format(c)) - else : - print('>> Unknown type:',type(y.values[0])) - #print(self.data.dtypes) - - def renameColumn(self,iCol,newName): - self.columns[iCol]=newName - self.data.columns.values[iCol]=newName - - def deleteColumns(self,ICol): - """ Delete columns by index, not column names which can have duplicates""" - IKeep =[i for i in np.arange(self.data.shape[1]) if i not in ICol] - self.data = self.data.iloc[:, IKeep] # Drop won't work for duplicates - for i in sorted(ICol, reverse=True): - del(self.columns[i]) - for f in self.formulas: - if f['pos'] == (i + 1): - self.formulas.remove(f) - break - for f in self.formulas: - if f['pos'] > (i + 1): - f['pos'] = f['pos'] - 1 - - def rename(self,new_name): - self.name='>'+new_name - - def addColumn(self,sNewName,NewCol,i=-1,sFormula=''): - if i<0: - i=self.data.shape[1] - self.data.insert(int(i),sNewName,NewCol) - self.columns=self.columnsFromDF(self.data) - for f in self.formulas: - if f['pos'] > i: - f['pos'] = f['pos'] + 1 - self.formulas.append({'pos': i+1, 'formula': sFormula, 'name': sNewName}) - - def setColumn(self,sNewName,NewCol,i,sFormula=''): - if i<1: - raise ValueError('Cannot set column at position ' + str(i)) - self.data = self.data.drop(columns=self.data.columns[i-1]) - self.data.insert(int(i-1),sNewName,NewCol) - self.columns=self.columnsFromDF(self.data) - for f in self.formulas: - if f['pos'] == i: - f['name'] = sNewName - f['formula'] = sFormula - - def getColumn(self,i): - """ Return column of data, where i=0 is the index column - If a mask exist, the mask is applied - """ - if i <= 0 : - x = np.array(range(self.data.shape[0])) - if self.mask is not None: - x=x[self.mask] - - c = None - isString = False - isDate = False - else: - if self.mask is not None: - c = self.data.iloc[self.mask, i-1] - x = self.data.iloc[self.mask, i-1].values - else: - c = self.data.iloc[:, i-1] 
- x = self.data.iloc[:, i-1].values - - isString = c.dtype == np.object and isinstance(c.values[0], str) - if isString: - x=x.astype(str) - isDate = np.issubdtype(c.dtype, np.datetime64) - if isDate: - dt=getDt(x) - if dt>1: - x=x.astype('datetime64[s]') - else: - x=x.astype('datetime64') - return x,isString,isDate,c - - - - def evalFormula(self,sFormula): - df = self.data - Index = np.array(range(df.shape[0])) - sFormula=sFormula.replace('{Index}','Index') - for i,c in enumerate(self.columns): - c_no_unit = no_unit(c).strip() - c_in_df = df.columns[i] - sFormula=sFormula.replace('{'+c_no_unit+'}','df[\''+c_in_df+'\']') - #print(sFormula) - try: - NewCol=eval(sFormula) - return NewCol - except: - return None - - def addColumnByFormula(self,sNewName,sFormula,i=-1): - NewCol=self.evalFormula(sFormula) - if NewCol is None: - return False - else: - self.addColumn(sNewName,NewCol,i,sFormula) - return True - - def setColumnByFormula(self,sNewName,sFormula,i=-1): - NewCol=self.evalFormula(sFormula) - if NewCol is None: - return False - else: - self.setColumn(sNewName,NewCol,i,sFormula) - return True - - - def export(self,path): - if isinstance(self.data, pd.DataFrame): - try: - self.data.to_csv(path,sep=',',index=False) #python3 - except: - self.data.to_csv(path,sep=str(u',').encode('utf-8'),index=False) #python 2. - else: - raise NotImplementedError('Export of data that is not a dataframe') - - - - @property - def basename(self): - return os.path.splitext(os.path.basename(self.filename))[0] - - @property - def shapestring(self): - return '{}x{}'.format(self.nCols, self.nRows) - - @property - def shape(self): - return (self.nRows, self.nCols) - - - @property - def columns_clean(self): - return [no_unit(s) for s in self.columns] - - @property - def name(self): - if len(self.__name)<=0: - return '' - if self.__name[0]=='>': - return self.__name[1:] - else: - return self.__name - - @property - def raw_name(self): - return self.__name - - @name.setter - def name(self,new_name): - self.__name=new_name - - @property - def nCols(self): - return len(self.columns) - - @property - def nRows(self): - return len(self.data.iloc[:,0]) # TODO if not panda - - -if __name__ == '__main__': - import pandas as pd; - from Tables import Table - import numpy as np - - def OnTabPopup(event): - self.PopupMenu(TablePopup(self,selPanel.tabPanel.lbTab), event.GetPosition()) +import numpy as np +import os.path +from dateutil import parser +import pandas as pd +try: + from .common import no_unit, ellude_common, getDt +except: + from common import no_unit, ellude_common, getDt +try: + import weio # File Formats and File Readers +except: + print('') + print('Error: the python package `weio` was not imported successfully.\n') + print('Most likely the submodule `weio` was not cloned with `pyDatView`') + print('Type the following command to retrieve it:\n') + print(' git submodule update --init --recursive\n') + print('Alternatively re-clone this repository into a separate folder:\n') + print(' git clone --recurse-submodules https://github.com/ebranlard/pyDatView\n') + sys.exit(-1) + + + +# --------------------------------------------------------------------------------} +# --- TabList +# --------------------------------------------------------------------------------{ +class TableList(object): # todo inherit list + def __init__(self,tabs=[]): + self._tabs=tabs + self.Naming='Ellude' + + def append(self,t): + if isinstance(t,list): + self._tabs += t + else: + self._tabs += [t] + + + def from_dataframes(self, dataframes=[], 
names=[], bAdd=False): + if not bAdd: + self.clean() # TODO figure it out + # Returning a list of tables + for df,name in zip(dataframes, names): + if df is not None: + self.append(Table(data=df, name=name)) + + def load_tables_from_files(self, filenames=[], fileformat=None, bAdd=False): + """ load multiple files, only trigger the plot at the end """ + if not bAdd: + self.clean() # TODO figure it out + warnList=[] + for f in filenames: + if f in self.unique_filenames: + warnList.append('Warn: Cannot add a file already opened ' + f) + elif len(f)==0: + pass + # warn+= 'Warn: an empty filename was skipped' +'\n' + else: + tabs, warnloc = self._load_file_tabs(f,fileformat=fileformat) + if len(warnloc)>0: + warnList.append(warnloc) + self.append(tabs) + + return warnList + + def _load_file_tabs(self,filename,fileformat=None): + """ load a single file, adds table, and potentially trigger plotting """ + # Returning a list of tables + tabs=[] + warn='' + if not os.path.isfile(filename): + warn = 'Error: File not found: `'+filename+'`\n' + return tabs, warn + try: + F = weio.read(filename,fileformat = fileformat) + dfs = F.toDataFrame() + except weio.FileNotFoundError as e: + warn = 'Error: A file was not found!\n\n While opening:\n\n {}\n\n the following file was not found:\n\n {}\n'.format(filename, e.filename) + except IOError: + warn = 'Error: IO Error thrown while opening file: '+filename+'\n' + except MemoryError: + warn='Error: Insufficient memory!\n\nFile: '+filename+'\n\nTry closing and reopening the program, or use a 64 bit version of this program (i.e. of python).\n' + except weio.EmptyFileError: + warn='Error: File empty!\n\nFile is empty: '+filename+'\n\nOpen a different file.\n' + except weio.FormatNotDetectedError: + warn='Error: File format not detected!\n\nFile: '+filename+'\n\nUse an explicit file-format from the list\n' + except weio.WrongFormatError as e: + warn='Error: Wrong file format!\n\nFile: '+filename+'\n\n' \ + 'The file parser for the selected format failed to open the file.\n\n'+ \ + 'The reported error was:\n'+e.args[0]+'\n\n' + \ + 'Double-check your file format and report this error if you think it''s a bug.\n' + except weio.BrokenFormatError as e: + warn = 'Error: Inconsistency in the file format!\n\nFile: '+filename+'\n\n' \ + 'The reported error was:\n\n'+e.args[0]+'\n\n' + \ + 'Double-check your file format and report this error if you think it''s a bug.' 
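+        # Any failure not caught by the specific weio handlers above is unexpected:
+        # it is re-raised as-is so the full traceback reaches the caller, instead of
+        # being folded into a warning string like the known error cases.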
+ except: + raise + if len(warn)>0: + return tabs, warn + + if dfs is None: + pass + elif not isinstance(dfs,dict): + if len(dfs)>0: + tabs=[Table(data=dfs, filename=filename, fileformat=F.formatName())] + else: + for k in list(dfs.keys()): + if len(dfs[k])>0: + tabs.append(Table(data=dfs[k], name=str(k), filename=filename, fileformat=F.formatName())) + if len(tabs)<=0: + warn='Warn: No dataframe found in file: '+filename+'\n' + return tabs, warn + + def getTabs(self): + # TODO remove me later + return self._tabs + + def len(self): + return len(self._tabs) + + def haveSameColumns(self,I=None): + if I is None: + I=list(range(len(self._tabs))) + A=[len(self._tabs[i].columns)==len(self._tabs[I[0]].columns) for i in I ] + if all(A): + B=[self._tabs[i].columns_clean==self._tabs[I[0]].columns_clean for i in I] #list comparison + return all(B) + else: + return False + + def renameTable(self, iTab, newName): + oldName = self._tabs[iTab].name + if newName in [t.name for t in self._tabs]: + raise Exception('Error: This table already exist, choose a different name.') + # Renaming table + self._tabs[iTab].rename(newName) + return oldName + + def sort(self, method='byName'): + if method=='byName': + tabnames_display=self.getDisplayTabNames() + self._tabs = [t for _,t in sorted(zip(tabnames_display,self._tabs))] + else: + raise Exception('Sorting method unknown: `{}`'.format(method)) + + def deleteTabs(self, I): + self._tabs = [t for i,t in enumerate(self._tabs) if i not in I] + + def setActiveNames(self,names): + for t,tn in zip(self._tabs,names): + t.active_name=tn + + def setNaming(self,naming): + self.Naming=naming + + def getDisplayTabNames(self): + if self.Naming=='Ellude': + # Temporary hack, using last names if all last names are unique + names = [t.raw_name for t in self._tabs] + last_names=[n.split('|')[-1] for n in names] + if len(np.unique(last_names)) == len(names): + return ellude_common(last_names) + else: + return ellude_common(names) + elif self.Naming=='FileNames': + return [os.path.splitext(os.path.basename(t.filename))[0] for t in self._tabs] + else: + raise Exception('Table naming unknown: {}'.format(self.Naming)) + + @property + def tabNames(self): + return [t.name for t in self._tabs] + + @property + def filenames(self): + return [t.filename for t in self._tabs] + + @property + def unique_filenames(self): + return list(set([t.filename for t in self._tabs])) + + def clean(self): + del self._tabs + self._tabs=[] + + def __repr__(self): + return '\n'.join([t.__repr__() for t in self._tabs]) + + # --- Mask related + @property + def maskStrings(self): + return [t.maskString for t in self._tabs] + + @property + def commonMaskString(self): + maskStrings=set(self.maskStrings) + if len(maskStrings) == 1: + return next(iter(maskStrings)) + else: + return '' + + def clearCommonMask(self): + for t in self._tabs: + t.clearMask() + + def applyCommonMaskString(self,maskString,bAdd=True): + dfs_new = [] + names_new = [] + errors=[] + for i,t in enumerate(self._tabs): + try: + df_new, name_new = t.applyMaskString(maskString, bAdd=bAdd) + if df_new is not None: + # we don't append when string is empty + dfs_new.append(df_new) + names_new.append(name_new) + except: + errors.append('Mask failed for table: '+t.active_name) # TODO + + return dfs_new, names_new, errors + + def applyResampling(self,iCol,sampDict,bAdd=True): + dfs_new = [] + names_new = [] + errors=[] + for i,t in enumerate(self._tabs): +# try: + df_new, name_new = t.applyResampling(iCol,sampDict, bAdd=bAdd) + if df_new is not None: + # 
we don't append when string is empty + dfs_new.append(df_new) + names_new.append(name_new) +# except: +# errors.append('Resampling failed for table: '+t.active_name) # TODO + + return dfs_new, names_new, errors + + + + + # --- Radial average related + def radialAvg(self,avgMethod,avgParam): + dfs_new = [] + names_new = [] + errors=[] + for i,t in enumerate(self._tabs): + dfs, names = t.radialAvg(avgMethod,avgParam) + for df,n in zip(dfs,names): + if df is not None: + dfs_new.append(df) + names_new.append(n) + return dfs_new, names_new, errors + + + # --- Element--related functions + def get(self,i): + return self._tabs[i] + + + +# --------------------------------------------------------------------------------} +# --- Table +# --------------------------------------------------------------------------------{ +# TODO sort out the naming +# +# Main naming concepts: +# name : +# active_name : +# raw_name : +# filename : +class Table(object): + def __init__(self,data=None,name='',filename='',columns=[],fileformat=''): + # Default init + self.maskString='' + self.mask=None + + self.filename = filename + self.fileformat = fileformat + self.formulas = [] + + if not isinstance(data,pd.DataFrame): + # ndarray?? + raise NotImplementedError('Tables that are not dataframe not implemented.') + else: + # --- Pandas DataFrame + self.data = data + self.columns = self.columnsFromDF(data) + # --- Trying to figure out how to name this table + if name is None or len(str(name))==0: + if data.columns.name is not None: + name=data.columns.name + + self.setupName(name=str(name)) + + self.convertTimeColumns() + + + def setupName(self,name=''): + # Creates a "codename": path | basename | name | ext + splits=[] + ext='' + if len(self.filename)>0: + base_dir = os.path.dirname(self.filename) + if len(base_dir)==0: + base_dir= os.getcwd() + self.filename=os.path.join(base_dir,self.filename) + splits.append(base_dir.replace('/','|').replace('\\','|')) + basename,ext=os.path.splitext(os.path.basename(self.filename)) + if len(basename)>0: + splits.append(basename) + if name is not None and len(name)>0: + splits.append(name) + #if len(ext)>0: + # splits.append(ext) + self.extension=ext + name='|'.join(splits) + if len(name)==0: + name='default' + self.name=name + self.active_name=self.name + + + def __repr__(self): + return 'Tab {} ({}x{}) (raw: {}, active: {}, file: {})'.format(self.name,self.nCols,self.nRows,self.raw_name, self.active_name,self.filename) + + def columnsFromDF(self,df): + return [s.replace('_',' ') for s in df.columns.values.astype(str)] + + + def clearMask(self): + self.maskString='' + self.mask=None + + def addLabelToName(self,label): + print('raw_name',self.raw_name) + raw_name=self.raw_name + sp=raw_name.split('|') + print(sp) + + def applyMaskString(self,maskString,bAdd=True): + df = self.data + Index = np.array(range(df.shape[0])) + sMask=maskString.replace('{Index}','Index') + for i,c in enumerate(self.columns): + c_no_unit = no_unit(c).strip() + c_in_df = df.columns[i] + # TODO sort out the mess with asarray (introduced to have and/or + # as array won't work with date comparison + # NOTE: using iloc to avoid duplicates column issue + if isinstance(df.iloc[0,i], pd._libs.tslibs.timestamps.Timestamp): + sMask=sMask.replace('{'+c_no_unit+'}','df[\''+c_in_df+'\']') + else: + sMask=sMask.replace('{'+c_no_unit+'}','np.asarray(df[\''+c_in_df+'\'])') + df_new = None + name_new = None + if len(sMask.strip())>0 and sMask.strip().lower()!='no mask': + try: + mask = np.asarray(eval(sMask)) + if bAdd: + df_new 
= df[mask] + name_new=self.raw_name+'_masked' + else: + self.mask=mask + self.maskString=maskString + except: + raise Exception('Error: The mask failed for table: '+self.name) + return df_new, name_new + + def applyResampling(self,iCol,sampDict,bAdd=True): + from pydatview.tools.signal import applySamplerDF + if iCol==0: + raise Exception('Cannot resample based on index') + colName=self.data.columns[iCol-1] + df_new =applySamplerDF(self.data, colName, sampDict=sampDict) + df_new + if bAdd: + name_new=self.raw_name+'_resampled' + else: + name_new=None + self.data=df_new + return df_new, name_new + + + def radialAvg(self,avgMethod, avgParam): + import pydatview.fast.fastlib as fastlib + import pydatview.fast.fastfarm as fastfarm + df = self.data + base,out_ext = os.path.splitext(self.filename) + + # --- Detect if it's a FAST Farm file + sCols = ''.join(df.columns) + if sCols.find('WkDf')>1 or sCols.find('CtT')>0: + # --- FAST FARM files + Files=[base+ext for ext in ['.fstf','.FSTF','.Fstf','.fmas','.FMAS','.Fmas'] if os.path.exists(base+ext)] + if len(Files)==0: + fst_in=None + #raise Exception('Error: No .fstf file found with name: '+base+'.fstf') + else: + fst_in=Files[0] + + dfRad,_,dfDiam = fastfarm.spanwisePostProFF(fst_in,avgMethod=avgMethod,avgParam=avgParam,D=1,df=df) + dfs_new = [dfRad,dfDiam] + names_new=[self.raw_name+'_rad',self.raw_name+'_diam'] + else: + # --- FAST files + + # HACK for AD file to find the right .fst file + iDotAD=base.lower().find('.ad') + if iDotAD>1: + base=base[:iDotAD] + # + Files=[base+ext for ext in ['.fst','.FST','.Fst','.dvr','.Dvr','.DVR'] if os.path.exists(base+ext)] + if len(Files)==0: + fst_in=None + #raise Exception('Error: No .fst file found with name: '+base+'.fst') + else: + fst_in=Files[0] + + + dfRadED, dfRadAD, dfRadBD= fastlib.spanwisePostPro(fst_in, avgMethod=avgMethod, avgParam=avgParam, out_ext=out_ext, df = self.data) + dfs_new = [dfRadAD, dfRadED, dfRadBD] + names_new=[self.raw_name+'_AD', self.raw_name+'_ED', self.raw_name+'_BD'] + return dfs_new, names_new + + def convertTimeColumns(self): + if len(self.data)>0: + for i,c in enumerate(self.data.columns.values): + y = self.data.iloc[:,i] + if y.dtype == np.object: + if isinstance(y.values[0], str): + # tring to convert to date + try: + parser.parse(y.values[0]) + isDate=True + except: + if y.values[0]=='NaT': + isDate=True + else: + isDate=False + if isDate: + try: + self.data[c]=pd.to_datetime(self.data[c].values).to_pydatetime() + print('Column {} converted to datetime'.format(c)) + except: + # Happens if values are e.g. 
"Monday, Tuesday" + print('Conversion to datetime failed, column {} inferred as string'.format(c)) + else: + print('Column {} inferred as string'.format(c)) + elif isinstance(y.values[0], (float, int)): + try: + self.data[c]=self.data[c].astype(float) + print('Column {} converted to float (likely nan)'.format(c)) + except: + self.data[c]=self.data[c].astype(str) + print('Column {} inferred and converted to string'.format(c)) + else : + print('>> Unknown type:',type(y.values[0])) + #print(self.data.dtypes) + + def renameColumn(self,iCol,newName): + self.columns[iCol]=newName + self.data.columns.values[iCol]=newName + + def deleteColumns(self,ICol): + """ Delete columns by index, not column names which can have duplicates""" + IKeep =[i for i in np.arange(self.data.shape[1]) if i not in ICol] + self.data = self.data.iloc[:, IKeep] # Drop won't work for duplicates + for i in sorted(ICol, reverse=True): + del(self.columns[i]) + for f in self.formulas: + if f['pos'] == (i + 1): + self.formulas.remove(f) + break + for f in self.formulas: + if f['pos'] > (i + 1): + f['pos'] = f['pos'] - 1 + + def rename(self,new_name): + self.name='>'+new_name + + def addColumn(self,sNewName,NewCol,i=-1,sFormula=''): + if i<0: + i=self.data.shape[1] + self.data.insert(int(i),sNewName,NewCol) + self.columns=self.columnsFromDF(self.data) + for f in self.formulas: + if f['pos'] > i: + f['pos'] = f['pos'] + 1 + self.formulas.append({'pos': i+1, 'formula': sFormula, 'name': sNewName}) + + def setColumn(self,sNewName,NewCol,i,sFormula=''): + if i<1: + raise ValueError('Cannot set column at position ' + str(i)) + self.data = self.data.drop(columns=self.data.columns[i-1]) + self.data.insert(int(i-1),sNewName,NewCol) + self.columns=self.columnsFromDF(self.data) + for f in self.formulas: + if f['pos'] == i: + f['name'] = sNewName + f['formula'] = sFormula + + def getColumn(self,i): + """ Return column of data, where i=0 is the index column + If a mask exist, the mask is applied + """ + if i <= 0 : + x = np.array(range(self.data.shape[0])) + if self.mask is not None: + x=x[self.mask] + + c = None + isString = False + isDate = False + else: + if self.mask is not None: + c = self.data.iloc[self.mask, i-1] + x = self.data.iloc[self.mask, i-1].values + else: + c = self.data.iloc[:, i-1] + x = self.data.iloc[:, i-1].values + + isString = c.dtype == np.object and isinstance(c.values[0], str) + if isString: + x=x.astype(str) + isDate = np.issubdtype(c.dtype, np.datetime64) + if isDate: + dt=getDt(x) + if dt>1: + x=x.astype('datetime64[s]') + else: + x=x.astype('datetime64') + return x,isString,isDate,c + + + + def evalFormula(self,sFormula): + df = self.data + Index = np.array(range(df.shape[0])) + sFormula=sFormula.replace('{Index}','Index') + for i,c in enumerate(self.columns): + c_no_unit = no_unit(c).strip() + c_in_df = df.columns[i] + sFormula=sFormula.replace('{'+c_no_unit+'}','df[\''+c_in_df+'\']') + #print(sFormula) + try: + NewCol=eval(sFormula) + return NewCol + except: + return None + + def addColumnByFormula(self,sNewName,sFormula,i=-1): + NewCol=self.evalFormula(sFormula) + if NewCol is None: + return False + else: + self.addColumn(sNewName,NewCol,i,sFormula) + return True + + def setColumnByFormula(self,sNewName,sFormula,i=-1): + NewCol=self.evalFormula(sFormula) + if NewCol is None: + return False + else: + self.setColumn(sNewName,NewCol,i,sFormula) + return True + + + def export(self,path): + if isinstance(self.data, pd.DataFrame): + try: + self.data.to_csv(path,sep=',',index=False) #python3 + except: + 
self.data.to_csv(path,sep=str(u',').encode('utf-8'),index=False) #python 2. + else: + raise NotImplementedError('Export of data that is not a dataframe') + + + + @property + def basename(self): + return os.path.splitext(os.path.basename(self.filename))[0] + + @property + def shapestring(self): + return '{}x{}'.format(self.nCols, self.nRows) + + @property + def shape(self): + return (self.nRows, self.nCols) + + + @property + def columns_clean(self): + return [no_unit(s) for s in self.columns] + + @property + def name(self): + if len(self.__name)<=0: + return '' + if self.__name[0]=='>': + return self.__name[1:] + else: + return self.__name + + @property + def raw_name(self): + return self.__name + + @name.setter + def name(self,new_name): + self.__name=new_name + + @property + def nCols(self): + return len(self.columns) + + @property + def nRows(self): + return len(self.data.iloc[:,0]) # TODO if not panda + + +if __name__ == '__main__': + import pandas as pd; + from Tables import Table + import numpy as np + + def OnTabPopup(event): + self.PopupMenu(TablePopup(self,selPanel.tabPanel.lbTab), event.GetPosition()) diff --git a/pydatview/common.py b/pydatview/common.py index 8903ec3..a455e46 100644 --- a/pydatview/common.py +++ b/pydatview/common.py @@ -1,454 +1,456 @@ -import numpy as np -import pandas as pd -import os -import platform -import datetime -import re - -CHAR={ -'menu' : u'\u2630', -'tridot' : u'\u26EC', -'apply' : u'\u1809', -'compute' : u'\u2699', # gear -'close' : u'\u274C', -'add' : u'\u2795', -'add_small': u'\ufe62', -'clear' : u'-', -'sun' : u'\u2600', -'suncloud' : u'\u26C5', -'cloud' : u'\u2601', -'check' : u'\u2714', -'help' : u'\u2753', -'pencil' : u'\u270f', # draw -'pick' : u'\u26cf', -'hammer' : u'\U0001f528', -'wrench' : u'\U0001f527', -'ruler' : u'\U0001F4CF', # measure -'control_knobs' : u'\U0001F39b', -'python' : u'\U0001F40D', -'chart' : u'\U0001F4c8', -'chart_small': u'\U0001F5e0', -} -# --------------------------------------------------------------------------------} -# --- ellude -# --------------------------------------------------------------------------------{ -def common_start(*strings): - """ Returns the longest common substring - from the beginning of the `strings` - """ - if len(strings)==1: - strings=tuple(strings[0]) - def _iter(): - for z in zip(*strings): - if z.count(z[0]) == len(z): # check all elements in `z` are the same - yield z[0] - else: - return - return ''.join(_iter()) - -def common_end(*strings): - if len(strings)==1: - strings=strings[0] - else: - strings=list(strings) - strings = [s[-1::-1] for s in strings] - return common_start(strings)[-1::-1] - -def find_leftstop(s): - for i,c in enumerate(reversed(s)): - if c in ['.','_','|']: - i=i+1 - return s[:len(s)-i] - return s - -def ellude_common(strings,minLength=2): - """ - ellude the common parts of two strings - - minLength: - if -1, string might be elluded up until there are of 0 length - if 0 , if a string of zero length is obtained, it will be tried to be extended until a stop character is found - - """ - # Selecting only the strings that do not start with the safe '>' char - S = [s for i,s in enumerate(strings) if ((len(s)>0) and (s[0]!= '>'))] - if len(S)==0: - pass - elif len(S)==1: - ns=S[0].rfind('|')+1 - ne=0; - else: - ss = common_start(S) - se = common_end(S) - iu = ss[:-1].rfind('_') - ip = ss[:-1].rfind('_') - if iu > 0: - if ip>0: - if iu>ip: - ss=ss[:iu+1] - else: - ss=ss[:iu+1] - - iu = se[:-1].find('_') - if iu > 0: - se=se[iu:] - iu = se[:-1].find('.') - if iu > 0: - 
se=se[iu:] - ns=len(ss) - ne=len(se) - - # Reduce start length if some strings end up empty - # Look if any of the strings will end up empty - SSS=[len(s[ns:-ne].lstrip('_') if ne>0 else s[ns:].lstrip('_')) for s in S] - currentMinLength=np.min(SSS) - if currentMinLength0: - ss=ss[:-delta] - ns=len(ss) - #print('ss',ss) - ss=find_leftstop(ss) - #print('ss',ss) - if len(ss)==ns: - ns=0 - else: - ns=len(ss)+1 - - for i,s in enumerate(strings): - if len(s)>0 and s[0]=='>': - strings[i]=s[1:] - else: - s=s[ns:-ne] if ne>0 else s[ns:] - strings[i]=s.lstrip('_') - if len(strings[i])==0: - strings[i]='tab{}'.format(i) - return strings - - -# --------------------------------------------------------------------------------} -# --- Key value -# --------------------------------------------------------------------------------{ -def extract_key_tuples(text): - """ - all=(0.1,-2),b=(inf,0), c=(-inf,0.3e+10) - """ - regex = re.compile(r'(?P[\w\-]+)=\((?P[0-9+epinf.-]*?),(?P[0-9+epinf.-]*?)\)($|,)') - return {match.group("key"): (np.float(match.group("value1")),np.float(match.group("value2"))) for match in regex.finditer(text.replace(' ',''))} - - -def extract_key_num(text): - """ - all=0.1, b=inf, c=-0.3e+10 - """ - regex = re.compile(r'(?P[\w\-]+)=(?P[0-9+epinf.-]*?)($|,)') - return {match.group("key"): np.float(match.group("value")) for match in regex.finditer(text.replace(' ',''))} - -# --------------------------------------------------------------------------------} -# --- -# --------------------------------------------------------------------------------{ -# def getMonoFontAbs(): -# import wx -# #return wx.Font(9, wx.MODERN, wx.NORMAL, wx.NORMAL, False, u'Monospace') -# if os.name=='nt': -# font=wx.Font(9, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) -# elif os.name=='posix': -# font=wx.Font(10, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) -# else: -# font=wx.Font(8, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) -# return font -# -# def getMonoFont(widget): -# import wx -# font = widget.GetFont() -# font.SetFamily(wx.TELETYPE) -# if platform.system()=='Windows': -# pass -# elif platform.system()=='Linux': -# pass -# elif platform.system()=='Darwin': -# font.SetPointSize(font.GetPointSize()-1) -# else: -# pass -# return font - -def getDt(x): - """ returns dt in s """ - def myisnat(dt): - if isinstance(dt,pd._libs.tslibs.timedeltas.Timedelta): - try: - dt=pd.to_timedelta(dt) # pandas 1.0 - except: - dt=pd.to_timedelta(dt,box=False) # backward compatibility - - elif isinstance(dt,datetime.timedelta): - dt=np.array([dt],dtype='timedelta64')[0] - return pd.isna(dt) -# try: -# print('>>>', dt,type(dt)) -# isnat=np.isnat(dt) -# except: -# print(type(dt),type(dx)) -# isnat=False -# raise -# return isnat - - - - if len(x)<=1: - return np.NaN - if isinstance(x[0],float): - return x[1]-x[0] - if isinstance(x[0],int) or isinstance(x[0],np.int32) or isinstance(x[0],np.int64): - return x[1]-x[0] - # first try with seconds - #print('') - #print('getDT: dx:',x[1]-x[0]) - dx = x[1]-x[0] - #print(type(dx)) - if myisnat(dx): - # we try the last values (or while loop, but may take a while) - dx = x[-1]-x[-2] - if myisnat(dx): - return np.nan - dt=np.timedelta64(dx,'s').item().total_seconds() - if dt<1: - # try higher resolution - dt=np.timedelta64(dx,'ns').item()/10.**9 - # TODO if dt> int res... 
do something - return dt - -def getTabCommonColIndices(tabs): - cleanedColLists = [ [cleanCol(s) for s in t.columns] for t in tabs] - commonCols = cleanedColLists[0] - for i in np.arange(1,len(cleanedColLists)): - commonCols = list( set(commonCols) & set( cleanedColLists[i])) - # Keep original order - commonCols =[c for c in cleanedColLists[0] if c in commonCols] # Might have duplicates.. - IMissPerTab=[] - IKeepPerTab=[] - IDuplPerTab=[] # Duplicates amongst the "common" - for cleanedCols in cleanedColLists: - IKeep=[] - IMiss=[] - IDupl=[] - # Ugly for loop here since we have to account for dupplicates - for comcol in commonCols: - I = [i for i, c in enumerate(cleanedCols) if c == comcol] - if len(I)==0: - pass - else: - if I[0] not in IKeep: - IKeep.append(I[0]) - if len(I)>1: - IDupl=IDupl+I[1:] - IMiss=[i for i,_ in enumerate(cleanedCols) if (i not in IKeep) and (i not in IDupl)] - IMissPerTab.append(IMiss) - IKeepPerTab.append(IKeep) - IDuplPerTab.append(IDupl) - return IKeepPerTab, IMissPerTab, IDuplPerTab - - -def cleanCol(s): - s=no_unit(s).strip() - s=no_unit(s.replace('(',' [').replace(')',']')) - s=s.lower().strip().replace('_','').replace(' ','').replace('-','') - return s - -def no_unit(s): - s=s.replace('_[',' [') - iu=s.rfind(' [') - if iu>1: - return s[:iu] - else: - return s - -def unit(s): - iu=s.rfind('[') - if iu>1: - return s[iu+1:].replace(']','') - else: - return '' - -def inverse_unit(s): - u=unit(s).strip() - if u=='': - return '' - elif u=='-': - return '-' - elif len(u)==1: - return '1/'+u; - elif u=='m/s': - return 's/m'; - elif u=='deg': - return '1/deg'; - else: - return '1/('+u+')' - -def filter_list(L, string): - """ simple (not regex or fuzzy) filtering of a list of strings - Returns matched indices and strings - """ - ignore_case = string==string.lower() - if ignore_case: - I=[i for i,s in enumerate(L) if string in s.lower()] - else: - I=[i for i,s in enumerate(L) if string in s] - L_found =np.array(L)[I] - return L_found, I - -def unique(l): - """ Return unique values of a list""" - used=set() - return [x for x in l if x not in used and (used.add(x) or True)] - -# --------------------------------------------------------------------------------} -# --- geometry -# --------------------------------------------------------------------------------{ -def rectangleOverlap(BLx1, BLy1, TRx1, TRy1, BLx2, BLy2, TRx2, TRy2): - """ returns true if two rectangles overlap - BL: Bottom left - TR: top right - "1" rectangle 1 - "2" rectangle 2 - """ - return not (TRx1 < BLx2 or BLx1 > TRx2 or TRy1 < BLy2 or BLy1> TRy2) -# --------------------------------------------------------------------------------} -# --- -# --------------------------------------------------------------------------------{ -def pretty_time(t): - # fPrettyTime: returns a 6-characters string corresponding to the input time in seconds. - # fPrettyTime(612)=='10m12s' - # AUTHOR: E. 
Branlard - if np.isnan(t): - return 'NaT'; - if(t<0): - return '------'; - elif (t<1) : - c=np.floor(t*100); - s='{:2d}.{:02d}s'.format(0,int(c)) - elif(t<60) : - s=np.floor(t); - c=np.floor((t-s)*100); - s='{:2d}.{:02d}s'.format(int(s),int(c)) - elif(t<3600) : - m=np.floor(t/60); - s=np.mod( np.floor(t), 60); - s='{:2d}m{:02d}s'.format(int(m),int(s)) - elif(t<86400) : - h=np.floor(t/3600); - m=np.floor(( np.mod( np.floor(t) , 3600))/60); - s='{:2d}h{:02d}m'.format(int(h),int(m)) - elif(t<8553600) : #below 3month - d=np.floor(t/86400); - h=np.floor( np.mod(np.floor(t), 86400)/3600); - s='{:2d}d{:02d}h'.format(int(d),int(h)) - elif(t<31536000): - m=t/(3600*24*30.5); - s='{:4.1f}mo'.format(m) - #s='+3mon.'; - else: - y=t/(3600*24*365.25); - s='{:.1f}y'.format(y) - return s - -def pretty_num(x): - if abs(x)<1000 and abs(x)>1e-4: - return "{:9.4f}".format(x) - else: - return '{:.3e}'.format(x) - -def pretty_num_short(x,digits=3): - if digits==4: - if abs(x)<1000 and abs(x)>1e-1: - return "{:.4f}".format(x) - else: - return "{:.4e}".format(x) - elif digits==3: - if abs(x)<1000 and abs(x)>1e-1: - return "{:.3f}".format(x) - else: - return "{:.3e}".format(x) - elif digits==2: - if abs(x)<1000 and abs(x)>1e-1: - return "{:.2f}".format(x) - else: - return "{:.2e}".format(x) - -# --------------------------------------------------------------------------------} -# --- Chinese characters -# --------------------------------------------------------------------------------{ -cjk_ranges = [ - ( 0x4E00, 0x62FF), - ( 0x6300, 0x77FF), - ( 0x7800, 0x8CFF), - ( 0x8D00, 0x9FCC), - ( 0x3400, 0x4DB5), - (0x20000, 0x215FF), - (0x21600, 0x230FF), - (0x23100, 0x245FF), - (0x24600, 0x260FF), - (0x26100, 0x275FF), - (0x27600, 0x290FF), - (0x29100, 0x2A6DF), - (0x2A700, 0x2B734), - (0x2B740, 0x2B81D), - (0x2B820, 0x2CEAF), - (0x2CEB0, 0x2EBEF), - (0x2F800, 0x2FA1F) - ] - -def has_chinese_char(s): - def is_cjk(char): - char = ord(char) - for bottom, top in cjk_ranges: - if char >= bottom and char <= top: - return True - return False - for c in s: - char=ord(c) - for bottom, top in cjk_ranges: - if char >= bottom and char <= top: - return True - return False - - -# --------------------------------------------------------------------------------} -# --- Helper functions -# --------------------------------------------------------------------------------{ -def YesNo(parent, question, caption = 'Yes or no?'): - import wx - dlg = wx.MessageDialog(parent, question, caption, wx.YES_NO | wx.ICON_QUESTION) - result = dlg.ShowModal() == wx.ID_YES - dlg.Destroy() - return result -def Info(parent, message, caption = 'Info'): - import wx - dlg = wx.MessageDialog(parent, message, caption, wx.OK | wx.ICON_INFORMATION) - dlg.ShowModal() - dlg.Destroy() -def Warn(parent, message, caption = 'Warning!'): - import wx - dlg = wx.MessageDialog(parent, message, caption, wx.OK | wx.ICON_WARNING) - dlg.ShowModal() - dlg.Destroy() -def Error(parent, message, caption = 'Error!'): - import wx - dlg = wx.MessageDialog(parent, message, caption, wx.OK | wx.ICON_ERROR) - dlg.ShowModal() - dlg.Destroy() - - - -# --------------------------------------------------------------------------------} -# --- -# --------------------------------------------------------------------------------{ - -def isString(x): - b = x.dtype == np.object and isinstance(x.values[0], str) - return b - -def isDate(x): - return np.issubdtype(x.dtype, np.datetime64) - +import numpy as np +import pandas as pd +import os +import platform +import datetime +import re + +CHAR={ +'menu' : 
u'\u2630', +'tridot' : u'\u26EC', +'apply' : u'\u1809', +'compute' : u'\u2699', # gear +'close' : u'\u274C', +'add' : u'\u2795', +'add_small': u'\ufe62', +'clear' : u'-', +'sun' : u'\u2600', +'suncloud' : u'\u26C5', +'cloud' : u'\u2601', +'check' : u'\u2714', +'help' : u'\u2753', +'pencil' : u'\u270f', # draw +'pick' : u'\u26cf', +'hammer' : u'\U0001f528', +'wrench' : u'\U0001f527', +'ruler' : u'\U0001F4CF', # measure +'control_knobs' : u'\U0001F39b', +'python' : u'\U0001F40D', +'chart' : u'\U0001F4c8', +'chart_small': u'\U0001F5e0', +} +# --------------------------------------------------------------------------------} +# --- ellude +# --------------------------------------------------------------------------------{ +def common_start(*strings): + """ Returns the longest common substring + from the beginning of the `strings` + """ + if len(strings)==1: + strings=tuple(strings[0]) + def _iter(): + for z in zip(*strings): + if z.count(z[0]) == len(z): # check all elements in `z` are the same + yield z[0] + else: + return + return ''.join(_iter()) + +def common_end(*strings): + if len(strings)==1: + strings=strings[0] + else: + strings=list(strings) + strings = [s[-1::-1] for s in strings] + return common_start(strings)[-1::-1] + +def find_leftstop(s): + for i,c in enumerate(reversed(s)): + if c in ['.','_','|']: + i=i+1 + return s[:len(s)-i] + return s + +def ellude_common(strings,minLength=2): + """ + ellude the common parts of two strings + + minLength: + if -1, string might be elluded up until there are of 0 length + if 0 , if a string of zero length is obtained, it will be tried to be extended until a stop character is found + + """ + # Selecting only the strings that do not start with the safe '>' char + S = [s for i,s in enumerate(strings) if ((len(s)>0) and (s[0]!= '>'))] + if len(S)==0: + pass + elif len(S)==1: + ns=S[0].rfind('|')+1 + ne=0; + else: + ss = common_start(S) + se = common_end(S) + iu = ss[:-1].rfind('_') + ip = ss[:-1].rfind('_') + if iu > 0: + if ip>0: + if iu>ip: + ss=ss[:iu+1] + else: + ss=ss[:iu+1] + + iu = se[:-1].find('_') + if iu > 0: + se=se[iu:] + iu = se[:-1].find('.') + if iu > 0: + se=se[iu:] + ns=len(ss) + ne=len(se) + + # Reduce start length if some strings end up empty + # Look if any of the strings will end up empty + SSS=[len(s[ns:-ne].lstrip('_') if ne>0 else s[ns:].lstrip('_')) for s in S] + currentMinLength=np.min(SSS) + if currentMinLength0: + ss=ss[:-delta] + ns=len(ss) + #print('ss',ss) + ss=find_leftstop(ss) + #print('ss',ss) + if len(ss)==ns: + ns=0 + else: + ns=len(ss)+1 + + for i,s in enumerate(strings): + if len(s)>0 and s[0]=='>': + strings[i]=s[1:] + else: + s=s[ns:-ne] if ne>0 else s[ns:] + strings[i]=s.lstrip('_') + if len(strings[i])==0: + strings[i]='tab{}'.format(i) + return strings + + +# --------------------------------------------------------------------------------} +# --- Key value +# --------------------------------------------------------------------------------{ +def extract_key_tuples(text): + """ + all=(0.1,-2),b=(inf,0), c=(-inf,0.3e+10) + """ + regex = re.compile(r'(?P[\w\-]+)=\((?P[0-9+epinf.-]*?),(?P[0-9+epinf.-]*?)\)($|,)') + return {match.group("key"): (np.float(match.group("value1")),np.float(match.group("value2"))) for match in regex.finditer(text.replace(' ',''))} + + +def extract_key_num(text): + """ + all=0.1, b=inf, c=-0.3e+10 + """ + regex = re.compile(r'(?P[\w\-]+)=(?P[0-9+epinf.-]*?)($|,)') + return {match.group("key"): np.float(match.group("value")) for match in regex.finditer(text.replace(' ',''))} + +# 
--------------------------------------------------------------------------------} +# --- +# --------------------------------------------------------------------------------{ +# def getMonoFontAbs(): +# import wx +# #return wx.Font(9, wx.MODERN, wx.NORMAL, wx.NORMAL, False, u'Monospace') +# if os.name=='nt': +# font=wx.Font(9, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) +# elif os.name=='posix': +# font=wx.Font(10, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) +# else: +# font=wx.Font(8, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) +# return font +# +# def getMonoFont(widget): +# import wx +# font = widget.GetFont() +# font.SetFamily(wx.TELETYPE) +# if platform.system()=='Windows': +# pass +# elif platform.system()=='Linux': +# pass +# elif platform.system()=='Darwin': +# font.SetPointSize(font.GetPointSize()-1) +# else: +# pass +# return font + +def getDt(x): + """ returns dt in s """ + def myisnat(dt): + if isinstance(dt,pd._libs.tslibs.timedeltas.Timedelta): + try: + dt=pd.to_timedelta(dt) # pandas 1.0 + except: + dt=pd.to_timedelta(dt,box=False) # backward compatibility + + elif isinstance(dt,datetime.timedelta): + dt=np.array([dt],dtype='timedelta64')[0] + return pd.isna(dt) +# try: +# print('>>>', dt,type(dt)) +# isnat=np.isnat(dt) +# except: +# print(type(dt),type(dx)) +# isnat=False +# raise +# return isnat + + + + if len(x)<=1: + return np.NaN + if isinstance(x[0],float): + return x[1]-x[0] + if isinstance(x[0],int) or isinstance(x[0],np.int32) or isinstance(x[0],np.int64): + return x[1]-x[0] + # first try with seconds + #print('') + #print('getDT: dx:',x[1]-x[0]) + dx = x[1]-x[0] + #print(type(dx)) + if myisnat(dx): + # we try the last values (or while loop, but may take a while) + dx = x[-1]-x[-2] + if myisnat(dx): + return np.nan + dt=np.timedelta64(dx,'s').item().total_seconds() + if dt<1: + # try higher resolution + dt=np.timedelta64(dx,'ns').item()/10.**9 + # TODO if dt> int res... do something + return dt + +def getTabCommonColIndices(tabs): + cleanedColLists = [ [cleanCol(s) for s in t.columns] for t in tabs] + nCols = np.array([len(cols) for cols in cleanedColLists]) + # Common columns between all column lists + commonCols = cleanedColLists[0] + for i in np.arange(1,len(cleanedColLists)): + commonCols = list( set(commonCols) & set( cleanedColLists[i])) + # Keep original order + commonCols =[c for c in cleanedColLists[0] if c in commonCols] # Might have duplicates.. 
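+    # At this point commonCols holds the cleaned column names shared by all tables,
+    # kept in the order of the first table. The loop below then records, for each table:
+    #  - IKeep: index of the first occurrence of each common column
+    #  - IDupl: indices of any additional (duplicate) occurrences of a common column
+    #  - IMiss: indices of columns that are not common to all tables
+    # These per-table index lists are returned together with nCols (columns per table).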
+ IMissPerTab=[] + IKeepPerTab=[] + IDuplPerTab=[] # Duplicates amongst the "common" + for cleanedCols in cleanedColLists: + IKeep=[] + IMiss=[] + IDupl=[] + # Ugly for loop here since we have to account for dupplicates + for comcol in commonCols: + I = [i for i, c in enumerate(cleanedCols) if c == comcol] + if len(I)==0: + pass + else: + if I[0] not in IKeep: + IKeep.append(I[0]) + if len(I)>1: + IDupl=IDupl+I[1:] + IMiss=[i for i,_ in enumerate(cleanedCols) if (i not in IKeep) and (i not in IDupl)] + IMissPerTab.append(IMiss) + IKeepPerTab.append(IKeep) + IDuplPerTab.append(IDupl) + return IKeepPerTab, IMissPerTab, IDuplPerTab, nCols + + +def cleanCol(s): + s=no_unit(s).strip() + s=no_unit(s.replace('(',' [').replace(')',']')) + s=s.lower().strip().replace('_','').replace(' ','').replace('-','') + return s + +def no_unit(s): + s=s.replace('_[',' [') + iu=s.rfind(' [') + if iu>1: + return s[:iu] + else: + return s + +def unit(s): + iu=s.rfind('[') + if iu>1: + return s[iu+1:].replace(']','') + else: + return '' + +def inverse_unit(s): + u=unit(s).strip() + if u=='': + return '' + elif u=='-': + return '-' + elif len(u)==1: + return '1/'+u; + elif u=='m/s': + return 's/m'; + elif u=='deg': + return '1/deg'; + else: + return '1/('+u+')' + +def filter_list(L, string): + """ simple (not regex or fuzzy) filtering of a list of strings + Returns matched indices and strings + """ + ignore_case = string==string.lower() + if ignore_case: + I=[i for i,s in enumerate(L) if string in s.lower()] + else: + I=[i for i,s in enumerate(L) if string in s] + L_found =np.array(L)[I] + return L_found, I + +def unique(l): + """ Return unique values of a list""" + used=set() + return [x for x in l if x not in used and (used.add(x) or True)] + +# --------------------------------------------------------------------------------} +# --- geometry +# --------------------------------------------------------------------------------{ +def rectangleOverlap(BLx1, BLy1, TRx1, TRy1, BLx2, BLy2, TRx2, TRy2): + """ returns true if two rectangles overlap + BL: Bottom left + TR: top right + "1" rectangle 1 + "2" rectangle 2 + """ + return not (TRx1 < BLx2 or BLx1 > TRx2 or TRy1 < BLy2 or BLy1> TRy2) +# --------------------------------------------------------------------------------} +# --- +# --------------------------------------------------------------------------------{ +def pretty_time(t): + # fPrettyTime: returns a 6-characters string corresponding to the input time in seconds. + # fPrettyTime(612)=='10m12s' + # AUTHOR: E. 
Branlard + if np.isnan(t): + return 'NaT'; + if(t<0): + return '------'; + elif (t<1) : + c=np.floor(t*100); + s='{:2d}.{:02d}s'.format(0,int(c)) + elif(t<60) : + s=np.floor(t); + c=np.floor((t-s)*100); + s='{:2d}.{:02d}s'.format(int(s),int(c)) + elif(t<3600) : + m=np.floor(t/60); + s=np.mod( np.floor(t), 60); + s='{:2d}m{:02d}s'.format(int(m),int(s)) + elif(t<86400) : + h=np.floor(t/3600); + m=np.floor(( np.mod( np.floor(t) , 3600))/60); + s='{:2d}h{:02d}m'.format(int(h),int(m)) + elif(t<8553600) : #below 3month + d=np.floor(t/86400); + h=np.floor( np.mod(np.floor(t), 86400)/3600); + s='{:2d}d{:02d}h'.format(int(d),int(h)) + elif(t<31536000): + m=t/(3600*24*30.5); + s='{:4.1f}mo'.format(m) + #s='+3mon.'; + else: + y=t/(3600*24*365.25); + s='{:.1f}y'.format(y) + return s + +def pretty_num(x): + if abs(x)<1000 and abs(x)>1e-4: + return "{:9.4f}".format(x) + else: + return '{:.3e}'.format(x) + +def pretty_num_short(x,digits=3): + if digits==4: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.4f}".format(x) + else: + return "{:.4e}".format(x) + elif digits==3: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.3f}".format(x) + else: + return "{:.3e}".format(x) + elif digits==2: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.2f}".format(x) + else: + return "{:.2e}".format(x) + +# --------------------------------------------------------------------------------} +# --- Chinese characters +# --------------------------------------------------------------------------------{ +cjk_ranges = [ + ( 0x4E00, 0x62FF), + ( 0x6300, 0x77FF), + ( 0x7800, 0x8CFF), + ( 0x8D00, 0x9FCC), + ( 0x3400, 0x4DB5), + (0x20000, 0x215FF), + (0x21600, 0x230FF), + (0x23100, 0x245FF), + (0x24600, 0x260FF), + (0x26100, 0x275FF), + (0x27600, 0x290FF), + (0x29100, 0x2A6DF), + (0x2A700, 0x2B734), + (0x2B740, 0x2B81D), + (0x2B820, 0x2CEAF), + (0x2CEB0, 0x2EBEF), + (0x2F800, 0x2FA1F) + ] + +def has_chinese_char(s): + def is_cjk(char): + char = ord(char) + for bottom, top in cjk_ranges: + if char >= bottom and char <= top: + return True + return False + for c in s: + char=ord(c) + for bottom, top in cjk_ranges: + if char >= bottom and char <= top: + return True + return False + + +# --------------------------------------------------------------------------------} +# --- Helper functions +# --------------------------------------------------------------------------------{ +def YesNo(parent, question, caption = 'Yes or no?'): + import wx + dlg = wx.MessageDialog(parent, question, caption, wx.YES_NO | wx.ICON_QUESTION) + result = dlg.ShowModal() == wx.ID_YES + dlg.Destroy() + return result +def Info(parent, message, caption = 'Info'): + import wx + dlg = wx.MessageDialog(parent, message, caption, wx.OK | wx.ICON_INFORMATION) + dlg.ShowModal() + dlg.Destroy() +def Warn(parent, message, caption = 'Warning!'): + import wx + dlg = wx.MessageDialog(parent, message, caption, wx.OK | wx.ICON_WARNING) + dlg.ShowModal() + dlg.Destroy() +def Error(parent, message, caption = 'Error!'): + import wx + dlg = wx.MessageDialog(parent, message, caption, wx.OK | wx.ICON_ERROR) + dlg.ShowModal() + dlg.Destroy() + + + +# --------------------------------------------------------------------------------} +# --- +# --------------------------------------------------------------------------------{ + +def isString(x): + b = x.dtype == np.object and isinstance(x.values[0], str) + return b + +def isDate(x): + return np.issubdtype(x.dtype, np.datetime64) + diff --git a/pydatview/fast/case_gen.py b/pydatview/fast/case_gen.py index a22c090..f9c53f0 100644 --- 
a/pydatview/fast/case_gen.py +++ b/pydatview/fast/case_gen.py @@ -1,586 +1,593 @@ -from __future__ import division, print_function -import os -import collections -import glob -import pandas as pd -import numpy as np -import shutil -import stat -import re - -# --- Misc fast libraries -import weio.weio.fast_input_file as fi -import pydatview.fast.runner as runner -import pydatview.fast.postpro as postpro -#import pyFAST.input_output.fast_input_file as fi -#import pyFAST.case_generation.runner as runner -#import pyFAST.input_output.postpro as postpro - - -# --------------------------------------------------------------------------------} -# --- Template replace -# --------------------------------------------------------------------------------{ -def handleRemoveReadonlyWin(func, path, exc_info): - """ - Error handler for ``shutil.rmtree``. - If the error is due to an access error (read only file) - it attempts to add write permission and then retries. - Usage : ``shutil.rmtree(path, onerror=onerror)`` - """ - if not os.access(path, os.W_OK): - # Is the error an access error ? - os.chmod(path, stat.S_IWUSR) - func(path) - else: - raise - - -def copyTree(src, dst): - """ - Copy a directory to another one, overwritting files if necessary. - copy_tree from distutils and copytree from shutil fail on Windows (in particular on git files) - """ - def forceMergeFlatDir(srcDir, dstDir): - if not os.path.exists(dstDir): - os.makedirs(dstDir) - for item in os.listdir(srcDir): - srcFile = os.path.join(srcDir, item) - dstFile = os.path.join(dstDir, item) - forceCopyFile(srcFile, dstFile) - - def forceCopyFile (sfile, dfile): - # ---- Handling error due to wrong mod - if os.path.isfile(dfile): - if not os.access(dfile, os.W_OK): - os.chmod(dfile, stat.S_IWUSR) - #print(sfile, ' > ', dfile) - shutil.copy2(sfile, dfile) - - def isAFlatDir(sDir): - for item in os.listdir(sDir): - sItem = os.path.join(sDir, item) - if os.path.isdir(sItem): - return False - return True - - for item in os.listdir(src): - s = os.path.join(src, item) - d = os.path.join(dst, item) - if os.path.isfile(s): - if not os.path.exists(dst): - os.makedirs(dst) - forceCopyFile(s,d) - if os.path.isdir(s): - isRecursive = not isAFlatDir(s) - if isRecursive: - copyTree(s, d) - else: - forceMergeFlatDir(s, d) - - -def templateReplaceGeneral(PARAMS, templateDir=None, outputDir=None, main_file=None, removeAllowed=False, removeRefSubFiles=False, oneSimPerDir=False): - """ Generate inputs files by replacing different parameters from a template file. - The generated files are placed in the output directory `outputDir` - The files are read and written using the library `weio`. - The template file is read and its content can be changed like a dictionary. - Each item of `PARAMS` correspond to a set of parameters that will be replaced - in the template file to generate one input file. - - For "FAST" input files, parameters can be changed recursively. - - - INPUTS: - PARAMS: list of dictionaries. Each key of the dictionary should be a key present in the - template file when read with `weio` (see: weio.read(main_file).keys() ) - - PARAMS[0]={'DT':0.1, 'EDFile|GBRatio':1, 'ServoFile|GenEff':0.8} - - templateDir: if provided, this directory and its content will be copied to `outputDir` - before doing the parametric substitution - - outputDir : directory where files will be generated. 
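For illustration, a minimal sketch of how the address-based substitution described above is typically driven; all directory names, file names and parameter values below are hypothetical:

    PARAMS = [
        {'__name__': 'ws08', 'DT': 0.05, 'EDFile|RotSpeed': 10.0, 'InflowFile|HWindSpeed': 8.0},
        {'__name__': 'ws10', 'DT': 0.05, 'EDFile|RotSpeed': 12.0, 'InflowFile|HWindSpeed': 10.0},
    ]
    fastFiles = templateReplaceGeneral(PARAMS, templateDir='Ref/', outputDir='Ref_Parametric/',
                                       main_file='Main.fst', removeRefSubFiles=True)

Each dictionary produces one set of input files named after its `__name__` key; parameters located in sub-files use the `|` address syntax (e.g. 'EDFile|RotSpeed').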
- """ - # --- Helper functions - def rebase_rel(wd,s,sid): - split = os.path.splitext(s) - return os.path.join(wd,split[0]+sid+split[1]) - - def get_strID(p) : - if '__name__' in p.keys(): - strID=p['__name__'] - else: - raise Exception('When calling `templateReplace`, provide the key `__name_` in the parameter dictionaries') - return strID - - def splitAddress(sAddress): - sp = sAddress.split('|') - if len(sp)==1: - return sp[0],[] - else: - return sp[0],sp[1:] - - def rebaseFileName(org_filename, workDir, strID): - new_filename_full = rebase_rel(workDir, org_filename,'_'+strID) - new_filename = os.path.relpath(new_filename_full,workDir).replace('\\','/') - return new_filename, new_filename_full - - def replaceRecurse(templatename_or_newname, FileKey, ParamKey, ParamValue, Files, strID, workDir, TemplateFiles): - """ - FileKey: a single key defining which file we are currently modifying e.g. :'AeroFile', 'EDFile','FVWInputFileName' - ParamKey: the address key of the parameter to be changed, relative to the current FileKey - e.g. 'EDFile|IntMethod' (if FileKey is '') - 'IntMethod' (if FileKey is 'EDFile') - ParamValue: the value to be used - Files: dict of files, as returned by weio, keys are "FileKeys" - """ - # --- Special handling for the root - if FileKey=='': - FileKey='Root' - # --- Open (or get if already open) file where a parameter needs to be changed - if FileKey in Files.keys(): - # The file was already opened, it's stored - f = Files[FileKey] - newfilename_full = f.filename - newfilename = os.path.relpath(newfilename_full,workDir).replace('\\','/') - - else: - templatefilename = templatename_or_newname - templatefilename_full = os.path.join(workDir,templatefilename) - TemplateFiles.append(templatefilename_full) - if FileKey=='Root': - # Root files, we start from strID - ext = os.path.splitext(templatefilename)[-1] - newfilename_full = os.path.join(wd,strID+ext) - newfilename = strID+ext - else: - newfilename, newfilename_full = rebaseFileName(templatefilename, workDir, strID) - #print('--------------------------------------------------------------') - #print('TemplateFile :', templatefilename) - #print('TemplateFileFull:', templatefilename_full) - #print('NewFile :', newfilename) - #print('NewFileFull :', newfilename_full) - shutil.copyfile(templatefilename_full, newfilename_full) - f= fi.FASTInputFile(newfilename_full) # open the template file for that filekey - Files[FileKey]=f # store it - - # --- Changing parameters in that file - NewFileKey_or_Key, ChildrenKeys = splitAddress(ParamKey) - if len(ChildrenKeys)==0: - # A simple parameter is changed - Key = NewFileKey_or_Key - #print('Setting', FileKey, '|',Key, 'to',ParamValue) - if Key=='OutList': - OutList=f[Key] - f[Key]=addToOutlist(OutList, ParamValue) - else: - f[Key] = ParamValue - else: - # Parameters needs to be changed in subfiles (children) - NewFileKey = NewFileKey_or_Key - ChildrenKey = '|'.join(ChildrenKeys) - child_templatefilename = f[NewFileKey].strip('"') # old filename that will be used as a template - baseparent = os.path.dirname(newfilename) - #print('Child templatefilename:',child_templatefilename) - #print('Parent base dir :',baseparent) - workDir = os.path.join(workDir, baseparent) - - # - newchildFilename, Files = replaceRecurse(child_templatefilename, NewFileKey, ChildrenKey, ParamValue, Files, strID, workDir, TemplateFiles) - #print('Setting', FileKey, '|',NewFileKey, 'to',newchildFilename) - f[NewFileKey] = '"'+newchildFilename+'"' - - return newfilename, Files - - - # --- Safety checks - if 
templateDir is None and outputDir is None: - raise Exception('Provide at least a template directory OR an output directory') - - if templateDir is not None: - if not os.path.exists(templateDir): - raise Exception('Template directory does not exist: '+templateDir) - - # Default value of outputDir if not provided - if templateDir[-1]=='/' or templateDir[-1]=='\\' : - templateDir=templateDir[0:-1] - if outputDir is None: - outputDir=templateDir+'_Parametric' - - # --- Main file use as "master" - if templateDir is not None: - main_file=os.path.join(outputDir, os.path.basename(main_file)) - else: - main_file=main_file - - # Params need to be a list - if not isinstance(PARAMS,list): - PARAMS=[PARAMS] - - if oneSimPerDir: - workDirS=[os.path.join(outputDir,get_strID(p)) for p in PARAMS] - else: - workDirS=[outputDir]*len(PARAMS) - # --- Creating outputDir - Copying template folder to outputDir if necessary - # Copying template folder to workDir - for wd in list(set(workDirS)): - if removeAllowed: - removeFASTOuputs(wd) - if os.path.exists(wd) and removeAllowed: - shutil.rmtree(wd, ignore_errors=False, onerror=handleRemoveReadonlyWin) - copyTree(templateDir, wd) - if removeAllowed: - removeFASTOuputs(wd) - - - TemplateFiles=[] - files=[] - for ip,(wd,p) in enumerate(zip(workDirS,PARAMS)): - if '__index__' not in p.keys(): - p['__index__']=ip - - main_file_base = os.path.basename(main_file) - strID = get_strID(p) - # --- Setting up files for this simulation - Files=dict() - for k,v in p.items(): - if k =='__index__' or k=='__name__': - continue - new_mainFile, Files = replaceRecurse(main_file_base, '', k, v, Files, strID, wd, TemplateFiles) - - # --- Writting files - for k,f in Files.items(): - if k=='Root': - files.append(f.filename) - f.write() - - # --- Remove extra files at the end - if removeRefSubFiles: - TemplateFiles, nCounts = np.unique(TemplateFiles, return_counts=True) - if not oneSimPerDir: - # we can only detele template files that were used by ALL simulations - TemplateFiles=[t for nc,t in zip(nCounts, TemplateFiles) if nc==len(PARAMS)] - for tf in TemplateFiles: - try: - os.remove(tf) - except: - print('[FAIL] Removing '+tf) - pass - return files - -def templateReplace(PARAMS, templateDir, outputDir=None, main_file=None, removeAllowed=False, removeRefSubFiles=False, oneSimPerDir=False): - """ Replace parameters in a fast folder using a list of dictionaries where the keys are for instance: - 'DT', 'EDFile|GBRatio', 'ServoFile|GenEff' - """ - # --- For backward compatibility, remove "FAST|" from the keys - for p in PARAMS: - old_keys=[ k for k,_ in p.items() if k.find('FAST|')==0] - for k_old in old_keys: - k_new=k_old.replace('FAST|','') - p[k_new] = p.pop(k_old) - - return templateReplaceGeneral(PARAMS, templateDir, outputDir=outputDir, main_file=main_file, - removeAllowed=removeAllowed, removeRefSubFiles=removeRefSubFiles, oneSimPerDir=oneSimPerDir) - -def removeFASTOuputs(workDir): - # Cleaning folder - for f in glob.glob(os.path.join(workDir,'*.out')): - os.remove(f) - for f in glob.glob(os.path.join(workDir,'*.outb')): - os.remove(f) - for f in glob.glob(os.path.join(workDir,'*.ech')): - os.remove(f) - for f in glob.glob(os.path.join(workDir,'*.sum')): - os.remove(f) - -# --------------------------------------------------------------------------------} -# --- Tools for template replacement -# --------------------------------------------------------------------------------{ -def paramsSteadyAero(p=dict()): - p['AeroFile|AFAeroMod']=1 # remove dynamic effects dynamic - 
p['AeroFile|WakeMod']=1 # remove dynamic inflow dynamic - p['AeroFile|TwrPotent']=0 # remove tower shadow - return p - -def paramsNoGen(p=dict()): - p['EDFile|GenDOF' ] = 'False' - return p - -def paramsGen(p=dict()): - p['EDFile|GenDOF' ] = 'True' - return p - -def paramsNoController(p=dict()): - p['ServoFile|PCMode'] = 0; - p['ServoFile|VSContrl'] = 0; - p['ServoFile|YCMode'] = 0; - return p - -def paramsControllerDLL(p=dict()): - p['ServoFile|PCMode'] = 5; - p['ServoFile|VSContrl'] = 5; - p['ServoFile|YCMode'] = 5; - p['EDFile|GenDOF'] = 'True'; - return p - - -def paramsStiff(p=dict()): - p['EDFile|FlapDOF1'] = 'False' - p['EDFile|FlapDOF2'] = 'False' - p['EDFile|EdgeDOF' ] = 'False' - p['EDFile|TeetDOF' ] = 'False' - p['EDFile|DrTrDOF' ] = 'False' - p['EDFile|YawDOF' ] = 'False' - p['EDFile|TwFADOF1'] = 'False' - p['EDFile|TwFADOF2'] = 'False' - p['EDFile|TwSSDOF1'] = 'False' - p['EDFile|TwSSDOF2'] = 'False' - p['EDFile|PtfmSgDOF'] = 'False' - p['EDFile|PtfmSwDOF'] = 'False' - p['EDFile|PtfmHvDOF'] = 'False' - p['EDFile|PtfmRDOF'] = 'False' - p['EDFile|PtfmPDOF'] = 'False' - p['EDFile|PtfmYDOF'] = 'False' - return p - -def paramsWS_RPM_Pitch(WS, RPM, Pitch, baseDict=None, flatInputs=False): - """ - Generate OpenFAST "parameters" (list of dictionaries with "address") - chaing the inputs in ElastoDyn, InflowWind for different wind speed, RPM and Pitch - """ - # --- Ensuring everythin is an iterator - def iterify(x): - if not isinstance(x, collections.Iterable): x = [x] - return x - WS = iterify(WS) - RPM = iterify(RPM) - Pitch = iterify(Pitch) - # --- If inputs are not flat but different vectors to length through, we flatten them (TODO: meshgrid and ravel?) - if not flatInputs : - WS_flat = [] - Pitch_flat = [] - RPM_flat = [] - for pitch in Pitch: - for rpm in RPM: - for ws in WS: - WS_flat.append(ws) - RPM_flat.append(rpm) - Pitch_flat.append(pitch) - else: - WS_flat, Pitch_flat, RPM_flat = WS, Pitch, RPM - - # --- Defining the parametric study - PARAMS=[] - i=0 - for ws,rpm,pitch in zip(WS_flat,RPM_flat,Pitch_flat): - if baseDict is None: - p=dict() - else: - p = baseDict.copy() - p['EDFile|RotSpeed'] = rpm - p['InflowFile|HWindSpeed'] = ws - p['InflowFile|WindType'] = 1 # Setting steady wind - p['EDFile|BlPitch(1)'] = pitch - p['EDFile|BlPitch(2)'] = pitch - p['EDFile|BlPitch(3)'] = pitch - - p['__index__'] = i - p['__name__'] = '{:03d}_ws{:04.1f}_pt{:04.2f}_om{:04.2f}'.format(p['__index__'],p['InflowFile|HWindSpeed'],p['EDFile|BlPitch(1)'],p['EDFile|RotSpeed']) - i=i+1 - PARAMS.append(p) - return PARAMS - -def paramsLinearTrim(p=dict()): - - # Set a few DOFs, move this to main file - p['Linearize'] = True - p['CalcSteady'] = True - p['TrimGain'] = 1e-4 - p['TrimTol'] = 1e-5 - p['CompMooring'] = 0 - p['CompHydro'] = 0 - p['LinOutJac'] = False - p['LinOutMod'] = False - p['OutFmt'] = '"ES20.12E3"' # Important for decent resolution - - p['AeroFile|AFAeroMod'] = 1 - p['AeroFile|CavitCheck'] = 'False' - p['AeroFile|CompAA'] = 'False' - - p['ServoFile|PCMode'] = 0 - p['ServoFile|VSContrl'] = 1 - - p['ServoFile|CompNTMD'] = 'False' - p['ServoFile|CompTTMD'] = 'False' - - # Set all DOFs off, enable as desired - p['EDFile|FlapDOF1'] = 'False' - p['EDFile|FlapDOF2'] = 'False' - p['EDFile|EdgeDOF'] = 'False' - p['EDFile|TeetDOF'] = 'False' - p['EDFile|DrTrDOF'] = 'False' - p['EDFile|GenDOF'] = 'False' - p['EDFile|YawDOF'] = 'False' - p['EDFile|TwFADOF1'] = 'False' - p['EDFile|TwFADOF2'] = 'False' - p['EDFile|TwSSDOF1'] = 'False' - p['EDFile|TwSSDOF2'] = 'False' - p['EDFile|PtfmSgDOF'] = 
'False' - p['EDFile|PtfmSwDOF'] = 'False' - p['EDFile|PtfmHvDOF'] = 'False' - p['EDFile|PtfmRDOF'] = 'False' - p['EDFile|PtfmPDOF'] = 'False' - p['EDFile|PtfmYDOF'] = 'False' - - - return p - -# --------------------------------------------------------------------------------} -# --- -# --------------------------------------------------------------------------------{ -def createStepWind(filename,WSstep=1,WSmin=3,WSmax=25,tstep=100,dt=0.5,tmin=0,tmax=999): - f = weio.FASTWndFile() - Steps= np.arange(WSmin,WSmax+WSstep,WSstep) - print(Steps) - nCol = len(f.colNames) - nRow = len(Steps)*2 - M = np.zeros((nRow,nCol)); - M[0,0] = tmin - M[0,1] = WSmin - for i,s in enumerate(Steps[:-1]): - M[2*i+1,0] = tmin + (i+1)*tstep-dt - M[2*i+2,0] = tmin + (i+1)*tstep - M[2*i+1,1] = Steps[i] - if i0: - main_fastfile=os.path.basename(main_fastfile) - - # --- Reading main fast file to get rotor radius - fst = fi.FASTInputFile(os.path.join(refdir,main_fastfile)) - ed = fi.FASTInputFile(os.path.join(refdir,fst['EDFile'].replace('"',''))) - R = ed['TipRad'] - - # --- Making sure we have - if (Omega is not None): - if (Lambda is not None): - WS = np.ones(Omega.shape)*WS_default - elif (WS is not None): - if len(WS)!=len(Omega): - raise Exception('When providing Omega and WS, both vectors should have the same dimension') - else: - WS = np.ones(Omega.shape)*WS_default - else: - Omega = WS_default * Lambda/R*60/(2*np.pi) # TODO, use more realistic combinations of WS and Omega - WS = np.ones(Omega.shape)*WS_default - - - # --- Defining flat vectors of operating conditions - WS_flat = [] - RPM_flat = [] - Pitch_flat = [] - for pitch in Pitch: - for (rpm,ws) in zip(Omega,WS): - WS_flat.append(ws) - RPM_flat.append(rpm) - Pitch_flat.append(pitch) - # --- Setting up default options - baseDict={'TMax': TMax, 'DT': 0.01, 'DT_Out': 0.1} # NOTE: Tmax should be at least 2pi/Omega - baseDict = paramsNoController(baseDict) - if bStiff: - baseDict = paramsStiff(baseDict) - if bNoGen: - baseDict = paramsNoGen(baseDict) - if bSteadyAero: - baseDict = paramsSteadyAero(baseDict) - - # --- Creating set of parameters to be changed - # TODO: verify that RtAeroCp and RtAeroCt are present in AeroDyn outlist - PARAMS = paramsWS_RPM_Pitch(WS_flat,RPM_flat,Pitch_flat,baseDict=baseDict, FlatInputs=True) - - # --- Generating all files in a workDir - workDir = refdir.strip('/').strip('\\')+'_CPLambdaPitch' - print('>>> Generating inputs files in {}'.format(workDir)) - RemoveAllowed=reRun # If the user want to rerun, we can remove, otherwise we keep existing simulations - fastFiles=templateReplace(PARAMS, refdir, outputDir=workDir,removeRefSubFiles=True,removeAllowed=RemoveAllowed,main_file=main_fastfile) - - # --- Running fast simulations - print('>>> Running {} simulations...'.format(len(fastFiles))) - runner.run_fastfiles(fastFiles, showOutputs=showOutputs, fastExe=fastExe, nCores=nCores, reRun=reRun) - - # --- Postpro - Computing averages at the end of the simluation - print('>>> Postprocessing...') - outFiles = [os.path.splitext(f)[0]+'.outb' for f in fastFiles] - # outFiles = glob.glob(os.path.join(workDir,'*.outb')) - ColKeepStats = ['RotSpeed_[rpm]','BldPitch1_[deg]','RtAeroCp_[-]','RtAeroCt_[-]','Wind1VelX_[m/s]'] - result = postpro.averagePostPro(outFiles,avgMethod='periods',avgParam=1,ColKeep=ColKeepStats,ColSort='RotSpeed_[rpm]') - # print(result) - - # --- Adding lambda, sorting and keeping only few columns - result['lambda_[-]'] = result['RotSpeed_[rpm]']*R*2*np.pi/60/result['Wind1VelX_[m/s]'] - 
result.sort_values(['lambda_[-]','BldPitch1_[deg]'],ascending=[True,True],inplace=True) - ColKeepFinal=['lambda_[-]','BldPitch1_[deg]','RtAeroCp_[-]','RtAeroCt_[-]'] - result=result[ColKeepFinal] - print('>>> Done') - - # --- Converting to a matrices - CP = result['RtAeroCp_[-]'].values - CT = result['RtAeroCt_[-]'].values - MCP =CP.reshape((len(Lambda),len(Pitch))) - MCT =CT.reshape((len(Lambda),len(Pitch))) - LAMBDA, PITCH = np.meshgrid(Lambda, Pitch) - # --- CP max - i,j = np.unravel_index(MCP.argmax(), MCP.shape) - MaxVal={'CP_max':MCP[i,j],'lambda_opt':LAMBDA[j,i],'pitch_opt':PITCH[j,i]} - - return MCP,MCT,Lambda,Pitch,MaxVal,result - - -if __name__=='__main__': - # --- Test of templateReplace - PARAMS = {} - PARAMS['TMax'] = 10 - PARAMS['__name__'] = 'MyName' - PARAMS['DT'] = 0.01 - PARAMS['DT_Out'] = 0.1 - PARAMS['EDFile|RotSpeed'] = 100 - PARAMS['EDFile|BlPitch(1)'] = 1 - PARAMS['EDFile|GBoxEff'] = 0.92 - PARAMS['ServoFile|VS_Rgn2K'] = 0.00038245 - PARAMS['ServoFile|GenEff'] = 0.95 - PARAMS['InflowFile|HWindSpeed'] = 8 - templateReplace(PARAMS,refDir,RemoveRefSubFiles=True) - +from __future__ import division, print_function +import os +import collections +import glob +import pandas as pd +import numpy as np +import shutil +import stat +import re + +# --- Misc fast libraries +import weio.weio.fast_input_file as fi +import pydatview.fast.runner as runner +import pydatview.fast.postpro as postpro +#import pyFAST.input_output.fast_input_file as fi +#import pyFAST.case_generation.runner as runner +#import pyFAST.input_output.postpro as postpro + + +# --------------------------------------------------------------------------------} +# --- Template replace +# --------------------------------------------------------------------------------{ +def handleRemoveReadonlyWin(func, path, exc_info): + """ + Error handler for ``shutil.rmtree``. + If the error is due to an access error (read only file) + it attempts to add write permission and then retries. + Usage : ``shutil.rmtree(path, onerror=onerror)`` + """ + if not os.access(path, os.W_OK): + # Is the error an access error ? + os.chmod(path, stat.S_IWUSR) + func(path) + else: + raise + + +def forceCopyFile (sfile, dfile): + # ---- Handling error due to wrong mod + if os.path.isfile(dfile): + if not os.access(dfile, os.W_OK): + os.chmod(dfile, stat.S_IWUSR) + #print(sfile, ' > ', dfile) + shutil.copy2(sfile, dfile) + +def copyTree(src, dst): + """ + Copy a directory to another one, overwritting files if necessary. + copy_tree from distutils and copytree from shutil fail on Windows (in particular on git files) + """ + def forceMergeFlatDir(srcDir, dstDir): + if not os.path.exists(dstDir): + os.makedirs(dstDir) + for item in os.listdir(srcDir): + srcFile = os.path.join(srcDir, item) + dstFile = os.path.join(dstDir, item) + forceCopyFile(srcFile, dstFile) + + def isAFlatDir(sDir): + for item in os.listdir(sDir): + sItem = os.path.join(sDir, item) + if os.path.isdir(sItem): + return False + return True + + for item in os.listdir(src): + s = os.path.join(src, item) + d = os.path.join(dst, item) + if os.path.isfile(s): + if not os.path.exists(dst): + os.makedirs(dst) + forceCopyFile(s,d) + if os.path.isdir(s): + isRecursive = not isAFlatDir(s) + if isRecursive: + copyTree(s, d) + else: + forceMergeFlatDir(s, d) + + +def templateReplaceGeneral(PARAMS, templateDir=None, outputDir=None, main_file=None, removeAllowed=False, removeRefSubFiles=False, oneSimPerDir=False): + """ Generate inputs files by replacing different parameters from a template file. 
+ The generated files are placed in the output directory `outputDir` + The files are read and written using the library `weio`. + The template file is read and its content can be changed like a dictionary. + Each item of `PARAMS` correspond to a set of parameters that will be replaced + in the template file to generate one input file. + + For "FAST" input files, parameters can be changed recursively. + + + INPUTS: + PARAMS: list of dictionaries. Each key of the dictionary should be a key present in the + template file when read with `weio` (see: weio.read(main_file).keys() ) + + PARAMS[0]={'DT':0.1, 'EDFile|GBRatio':1, 'ServoFile|GenEff':0.8} + + templateDir: if provided, this directory and its content will be copied to `outputDir` + before doing the parametric substitution + + outputDir : directory where files will be generated. + """ + # --- Helper functions + def rebase_rel(wd,s,sid): + split = os.path.splitext(s) + return os.path.join(wd,split[0]+sid+split[1]) + + def get_strID(p) : + if '__name__' in p.keys(): + strID=p['__name__'] + else: + raise Exception('When calling `templateReplace`, provide the key `__name_` in the parameter dictionaries') + return strID + + def splitAddress(sAddress): + sp = sAddress.split('|') + if len(sp)==1: + return sp[0],[] + else: + return sp[0],sp[1:] + + def rebaseFileName(org_filename, workDir, strID): + new_filename_full = rebase_rel(workDir, org_filename,'_'+strID) + new_filename = os.path.relpath(new_filename_full,workDir).replace('\\','/') + return new_filename, new_filename_full + + def replaceRecurse(templatename_or_newname, FileKey, ParamKey, ParamValue, Files, strID, workDir, TemplateFiles): + """ + FileKey: a single key defining which file we are currently modifying e.g. :'AeroFile', 'EDFile','FVWInputFileName' + ParamKey: the address key of the parameter to be changed, relative to the current FileKey + e.g. 
'EDFile|IntMethod' (if FileKey is '') + 'IntMethod' (if FileKey is 'EDFile') + ParamValue: the value to be used + Files: dict of files, as returned by weio, keys are "FileKeys" + """ + # --- Special handling for the root + if FileKey=='': + FileKey='Root' + # --- Open (or get if already open) file where a parameter needs to be changed + if FileKey in Files.keys(): + # The file was already opened, it's stored + f = Files[FileKey] + newfilename_full = f.filename + newfilename = os.path.relpath(newfilename_full,workDir).replace('\\','/') + + else: + templatefilename = templatename_or_newname + templatefilename_full = os.path.join(workDir,templatefilename) + TemplateFiles.append(templatefilename_full) + if FileKey=='Root': + # Root files, we start from strID + ext = os.path.splitext(templatefilename)[-1] + newfilename_full = os.path.join(wd,strID+ext) + newfilename = strID+ext + else: + newfilename, newfilename_full = rebaseFileName(templatefilename, workDir, strID) + #print('--------------------------------------------------------------') + #print('TemplateFile :', templatefilename) + #print('TemplateFileFull:', templatefilename_full) + #print('NewFile :', newfilename) + #print('NewFileFull :', newfilename_full) + shutil.copyfile(templatefilename_full, newfilename_full) + f= fi.FASTInputFile(newfilename_full) # open the template file for that filekey + Files[FileKey]=f # store it + + # --- Changing parameters in that file + NewFileKey_or_Key, ChildrenKeys = splitAddress(ParamKey) + if len(ChildrenKeys)==0: + # A simple parameter is changed + Key = NewFileKey_or_Key + #print('Setting', FileKey, '|',Key, 'to',ParamValue) + if Key=='OutList': + OutList=f[Key] + f[Key]=addToOutlist(OutList, ParamValue) + else: + f[Key] = ParamValue + else: + # Parameters needs to be changed in subfiles (children) + NewFileKey = NewFileKey_or_Key + ChildrenKey = '|'.join(ChildrenKeys) + child_templatefilename = f[NewFileKey].strip('"') # old filename that will be used as a template + baseparent = os.path.dirname(newfilename) + #print('Child templatefilename:',child_templatefilename) + #print('Parent base dir :',baseparent) + workDir = os.path.join(workDir, baseparent) + + # + newchildFilename, Files = replaceRecurse(child_templatefilename, NewFileKey, ChildrenKey, ParamValue, Files, strID, workDir, TemplateFiles) + #print('Setting', FileKey, '|',NewFileKey, 'to',newchildFilename) + f[NewFileKey] = '"'+newchildFilename+'"' + + return newfilename, Files + + + # --- Safety checks + if templateDir is None and outputDir is None: + raise Exception('Provide at least a template directory OR an output directory') + + if templateDir is not None: + if not os.path.exists(templateDir): + raise Exception('Template directory does not exist: '+templateDir) + + # Default value of outputDir if not provided + if templateDir[-1]=='/' or templateDir[-1]=='\\' : + templateDir=templateDir[0:-1] + if outputDir is None: + outputDir=templateDir+'_Parametric' + + # --- Main file use as "master" + if templateDir is not None: + main_file=os.path.join(outputDir, os.path.basename(main_file)) + else: + main_file=main_file + + # Params need to be a list + if not isinstance(PARAMS,list): + PARAMS=[PARAMS] + + if oneSimPerDir: + workDirS=[os.path.join(outputDir,get_strID(p)) for p in PARAMS] + else: + workDirS=[outputDir]*len(PARAMS) + # --- Creating outputDir - Copying template folder to outputDir if necessary + # Copying template folder to workDir + for wd in list(set(workDirS)): + if removeAllowed: + removeFASTOuputs(wd) + if os.path.exists(wd) 
and removeAllowed: + shutil.rmtree(wd, ignore_errors=False, onerror=handleRemoveReadonlyWin) + copyTree(templateDir, wd) + if removeAllowed: + removeFASTOuputs(wd) + + + TemplateFiles=[] + files=[] + for ip,(wd,p) in enumerate(zip(workDirS,PARAMS)): + if '__index__' not in p.keys(): + p['__index__']=ip + + main_file_base = os.path.basename(main_file) + strID = get_strID(p) + # --- Setting up files for this simulation + Files=dict() + for k,v in p.items(): + if k =='__index__' or k=='__name__': + continue + new_mainFile, Files = replaceRecurse(main_file_base, '', k, v, Files, strID, wd, TemplateFiles) + + # --- Writting files + for k,f in Files.items(): + if k=='Root': + files.append(f.filename) + f.write() + + # --- Remove extra files at the end + if removeRefSubFiles: + TemplateFiles, nCounts = np.unique(TemplateFiles, return_counts=True) + if not oneSimPerDir: + # we can only detele template files that were used by ALL simulations + TemplateFiles=[t for nc,t in zip(nCounts, TemplateFiles) if nc==len(PARAMS)] + for tf in TemplateFiles: + try: + os.remove(tf) + except: + print('[FAIL] Removing '+tf) + pass + return files + +def templateReplace(PARAMS, templateDir, outputDir=None, main_file=None, removeAllowed=False, removeRefSubFiles=False, oneSimPerDir=False): + """ Replace parameters in a fast folder using a list of dictionaries where the keys are for instance: + 'DT', 'EDFile|GBRatio', 'ServoFile|GenEff' + """ + # --- For backward compatibility, remove "FAST|" from the keys + for p in PARAMS: + old_keys=[ k for k,_ in p.items() if k.find('FAST|')==0] + for k_old in old_keys: + k_new=k_old.replace('FAST|','') + p[k_new] = p.pop(k_old) + + return templateReplaceGeneral(PARAMS, templateDir, outputDir=outputDir, main_file=main_file, + removeAllowed=removeAllowed, removeRefSubFiles=removeRefSubFiles, oneSimPerDir=oneSimPerDir) + +def removeFASTOuputs(workDir): + # Cleaning folder + for f in glob.glob(os.path.join(workDir,'*.out')): + os.remove(f) + for f in glob.glob(os.path.join(workDir,'*.outb')): + os.remove(f) + for f in glob.glob(os.path.join(workDir,'*.ech')): + os.remove(f) + for f in glob.glob(os.path.join(workDir,'*.sum')): + os.remove(f) + +# --------------------------------------------------------------------------------} +# --- Tools for template replacement +# --------------------------------------------------------------------------------{ +def paramsSteadyAero(p=None): + p = dict() if p is None else p + p['AeroFile|AFAeroMod']=1 # remove dynamic effects dynamic + p['AeroFile|WakeMod']=1 # remove dynamic inflow dynamic + p['AeroFile|TwrPotent']=0 # remove tower shadow + return p + +def paramsNoGen(p=None): + p = dict() if p is None else p + p['EDFile|GenDOF' ] = 'False' + return p + +def paramsGen(p=None): + p = dict() if p is None else p + p['EDFile|GenDOF' ] = 'True' + return p + +def paramsNoController(p=None): + p = dict() if p is None else p + p['ServoFile|PCMode'] = 0; + p['ServoFile|VSContrl'] = 0; + p['ServoFile|YCMode'] = 0; + return p + +def paramsControllerDLL(p=None): + p = dict() if p is None else p + p['ServoFile|PCMode'] = 5; + p['ServoFile|VSContrl'] = 5; + p['ServoFile|YCMode'] = 5; + p['EDFile|GenDOF'] = 'True'; + return p + + +def paramsStiff(p=None): + p = dict() if p is None else p + p['EDFile|FlapDOF1'] = 'False' + p['EDFile|FlapDOF2'] = 'False' + p['EDFile|EdgeDOF' ] = 'False' + p['EDFile|TeetDOF' ] = 'False' + p['EDFile|DrTrDOF' ] = 'False' + p['EDFile|YawDOF' ] = 'False' + p['EDFile|TwFADOF1'] = 'False' + p['EDFile|TwFADOF2'] = 'False' + 
p['EDFile|TwSSDOF1'] = 'False' + p['EDFile|TwSSDOF2'] = 'False' + p['EDFile|PtfmSgDOF'] = 'False' + p['EDFile|PtfmSwDOF'] = 'False' + p['EDFile|PtfmHvDOF'] = 'False' + p['EDFile|PtfmRDOF'] = 'False' + p['EDFile|PtfmPDOF'] = 'False' + p['EDFile|PtfmYDOF'] = 'False' + return p + +def paramsWS_RPM_Pitch(WS, RPM, Pitch, baseDict=None, flatInputs=False): + """ + Generate OpenFAST "parameters" (list of dictionaries with "address") + chaing the inputs in ElastoDyn, InflowWind for different wind speed, RPM and Pitch + """ + # --- Ensuring everythin is an iterator + def iterify(x): + if not isinstance(x, collections.Iterable): x = [x] + return x + WS = iterify(WS) + RPM = iterify(RPM) + Pitch = iterify(Pitch) + # --- If inputs are not flat but different vectors to length through, we flatten them (TODO: meshgrid and ravel?) + if not flatInputs : + WS_flat = [] + Pitch_flat = [] + RPM_flat = [] + for pitch in Pitch: + for rpm in RPM: + for ws in WS: + WS_flat.append(ws) + RPM_flat.append(rpm) + Pitch_flat.append(pitch) + else: + WS_flat, Pitch_flat, RPM_flat = WS, Pitch, RPM + + # --- Defining the parametric study + PARAMS=[] + i=0 + for ws,rpm,pitch in zip(WS_flat,RPM_flat,Pitch_flat): + if baseDict is None: + p=dict() + else: + p = baseDict.copy() + p['EDFile|RotSpeed'] = rpm + p['InflowFile|HWindSpeed'] = ws + p['InflowFile|WindType'] = 1 # Setting steady wind + p['EDFile|BlPitch(1)'] = pitch + p['EDFile|BlPitch(2)'] = pitch + p['EDFile|BlPitch(3)'] = pitch + + p['__index__'] = i + p['__name__'] = '{:03d}_ws{:04.1f}_pt{:04.2f}_om{:04.2f}'.format(p['__index__'],p['InflowFile|HWindSpeed'],p['EDFile|BlPitch(1)'],p['EDFile|RotSpeed']) + i=i+1 + PARAMS.append(p) + return PARAMS + +def paramsLinearTrim(p=None): + p = dict() if p is None else p + + # Set a few DOFs, move this to main file + p['Linearize'] = True + p['CalcSteady'] = True + p['TrimGain'] = 1e-4 + p['TrimTol'] = 1e-5 + p['CompMooring'] = 0 + p['CompHydro'] = 0 + p['LinOutJac'] = False + p['LinOutMod'] = False + p['OutFmt'] = '"ES20.12E3"' # Important for decent resolution + + p['AeroFile|AFAeroMod'] = 1 + p['AeroFile|CavitCheck'] = 'False' + p['AeroFile|CompAA'] = 'False' + + p['ServoFile|PCMode'] = 0 + p['ServoFile|VSContrl'] = 1 + + p['ServoFile|CompNTMD'] = 'False' + p['ServoFile|CompTTMD'] = 'False' + + # Set all DOFs off, enable as desired + p['EDFile|FlapDOF1'] = 'False' + p['EDFile|FlapDOF2'] = 'False' + p['EDFile|EdgeDOF'] = 'False' + p['EDFile|TeetDOF'] = 'False' + p['EDFile|DrTrDOF'] = 'False' + p['EDFile|GenDOF'] = 'False' + p['EDFile|YawDOF'] = 'False' + p['EDFile|TwFADOF1'] = 'False' + p['EDFile|TwFADOF2'] = 'False' + p['EDFile|TwSSDOF1'] = 'False' + p['EDFile|TwSSDOF2'] = 'False' + p['EDFile|PtfmSgDOF'] = 'False' + p['EDFile|PtfmSwDOF'] = 'False' + p['EDFile|PtfmHvDOF'] = 'False' + p['EDFile|PtfmRDOF'] = 'False' + p['EDFile|PtfmPDOF'] = 'False' + p['EDFile|PtfmYDOF'] = 'False' + + + return p + +# --------------------------------------------------------------------------------} +# --- +# --------------------------------------------------------------------------------{ +def createStepWind(filename,WSstep=1,WSmin=3,WSmax=25,tstep=100,dt=0.5,tmin=0,tmax=999): + f = weio.FASTWndFile() + Steps= np.arange(WSmin,WSmax+WSstep,WSstep) + print(Steps) + nCol = len(f.colNames) + nRow = len(Steps)*2 + M = np.zeros((nRow,nCol)); + M[0,0] = tmin + M[0,1] = WSmin + for i,s in enumerate(Steps[:-1]): + M[2*i+1,0] = tmin + (i+1)*tstep-dt + M[2*i+2,0] = tmin + (i+1)*tstep + M[2*i+1,1] = Steps[i] + if i0: + 
main_fastfile=os.path.basename(main_fastfile) + + # --- Reading main fast file to get rotor radius + fst = fi.FASTInputFile(os.path.join(refdir,main_fastfile)) + ed = fi.FASTInputFile(os.path.join(refdir,fst['EDFile'].replace('"',''))) + R = ed['TipRad'] + + # --- Making sure we have + if (Omega is not None): + if (Lambda is not None): + WS = np.ones(Omega.shape)*WS_default + elif (WS is not None): + if len(WS)!=len(Omega): + raise Exception('When providing Omega and WS, both vectors should have the same dimension') + else: + WS = np.ones(Omega.shape)*WS_default + else: + Omega = WS_default * Lambda/R*60/(2*np.pi) # TODO, use more realistic combinations of WS and Omega + WS = np.ones(Omega.shape)*WS_default + + + # --- Defining flat vectors of operating conditions + WS_flat = [] + RPM_flat = [] + Pitch_flat = [] + for pitch in Pitch: + for (rpm,ws) in zip(Omega,WS): + WS_flat.append(ws) + RPM_flat.append(rpm) + Pitch_flat.append(pitch) + # --- Setting up default options + baseDict={'TMax': TMax, 'DT': 0.01, 'DT_Out': 0.1} # NOTE: Tmax should be at least 2pi/Omega + baseDict = paramsNoController(baseDict) + if bStiff: + baseDict = paramsStiff(baseDict) + if bNoGen: + baseDict = paramsNoGen(baseDict) + if bSteadyAero: + baseDict = paramsSteadyAero(baseDict) + + # --- Creating set of parameters to be changed + # TODO: verify that RtAeroCp and RtAeroCt are present in AeroDyn outlist + PARAMS = paramsWS_RPM_Pitch(WS_flat,RPM_flat,Pitch_flat,baseDict=baseDict, FlatInputs=True) + + # --- Generating all files in a workDir + workDir = refdir.strip('/').strip('\\')+'_CPLambdaPitch' + print('>>> Generating inputs files in {}'.format(workDir)) + RemoveAllowed=reRun # If the user want to rerun, we can remove, otherwise we keep existing simulations + fastFiles=templateReplace(PARAMS, refdir, outputDir=workDir,removeRefSubFiles=True,removeAllowed=RemoveAllowed,main_file=main_fastfile) + + # --- Running fast simulations + print('>>> Running {} simulations...'.format(len(fastFiles))) + runner.run_fastfiles(fastFiles, showOutputs=showOutputs, fastExe=fastExe, nCores=nCores, reRun=reRun) + + # --- Postpro - Computing averages at the end of the simluation + print('>>> Postprocessing...') + outFiles = [os.path.splitext(f)[0]+'.outb' for f in fastFiles] + # outFiles = glob.glob(os.path.join(workDir,'*.outb')) + ColKeepStats = ['RotSpeed_[rpm]','BldPitch1_[deg]','RtAeroCp_[-]','RtAeroCt_[-]','Wind1VelX_[m/s]'] + result = postpro.averagePostPro(outFiles,avgMethod='periods',avgParam=1,ColKeep=ColKeepStats,ColSort='RotSpeed_[rpm]') + # print(result) + + # --- Adding lambda, sorting and keeping only few columns + result['lambda_[-]'] = result['RotSpeed_[rpm]']*R*2*np.pi/60/result['Wind1VelX_[m/s]'] + result.sort_values(['lambda_[-]','BldPitch1_[deg]'],ascending=[True,True],inplace=True) + ColKeepFinal=['lambda_[-]','BldPitch1_[deg]','RtAeroCp_[-]','RtAeroCt_[-]'] + result=result[ColKeepFinal] + print('>>> Done') + + # --- Converting to a matrices + CP = result['RtAeroCp_[-]'].values + CT = result['RtAeroCt_[-]'].values + MCP =CP.reshape((len(Lambda),len(Pitch))) + MCT =CT.reshape((len(Lambda),len(Pitch))) + LAMBDA, PITCH = np.meshgrid(Lambda, Pitch) + # --- CP max + i,j = np.unravel_index(MCP.argmax(), MCP.shape) + MaxVal={'CP_max':MCP[i,j],'lambda_opt':LAMBDA[j,i],'pitch_opt':PITCH[j,i]} + + return MCP,MCT,Lambda,Pitch,MaxVal,result + + +if __name__=='__main__': + # --- Test of templateReplace + PARAMS = {} + PARAMS['TMax'] = 10 + PARAMS['__name__'] = 'MyName' + PARAMS['DT'] = 0.01 + PARAMS['DT_Out'] = 0.1 + 
PARAMS['EDFile|RotSpeed'] = 100 + PARAMS['EDFile|BlPitch(1)'] = 1 + PARAMS['EDFile|GBoxEff'] = 0.92 + PARAMS['ServoFile|VS_Rgn2K'] = 0.00038245 + PARAMS['ServoFile|GenEff'] = 0.95 + PARAMS['InflowFile|HWindSpeed'] = 8 + templateReplace(PARAMS,refDir,RemoveRefSubFiles=True) + diff --git a/pydatview/fast/fastfarm.py b/pydatview/fast/fastfarm.py index b09df03..950433a 100644 --- a/pydatview/fast/fastfarm.py +++ b/pydatview/fast/fastfarm.py @@ -1,482 +1,490 @@ -import os -import glob -import numpy as np -import pandas as pd -try: - import weio -except: - raise Exception('Python package `weio` not found, please install it from https://github.com/ebranlard/weio ') - -from . import fastlib - -# --------------------------------------------------------------------------------} -# --- Small helper functions -# --------------------------------------------------------------------------------{ -def insertTN(s,i,nWT=1000): - """ insert turbine number in name """ - if nWT<10: - fmt='{:d}' - elif nWT<100: - fmt='{:02d}' - else: - fmt='{:03d}' - if s.find('T1')>=0: - s=s.replace('T1','T'+fmt.format(i)) - else: - sp=os.path.splitext(s) - s=sp[0]+'_T'+fmt.format(i)+sp[1] - return s -def forceCopyFile (sfile, dfile): - # ---- Handling error due to wrong mod - if os.path.isfile(dfile): - if not os.access(dfile, os.W_OK): - os.chmod(dfile, stat.S_IWUSR) - #print(sfile, ' > ', dfile) - shutil.copy2(sfile, dfile) - -# --------------------------------------------------------------------------------} -# --- Tools to create fast farm simulations -# --------------------------------------------------------------------------------{ -def writeFSTandDLL(FstT1Name, nWT): - """ - Write FST files for each turbine, with different ServoDyn files and DLL - FST files, ServoFiles, and DLL files will be written next to their turbine 1 - files, with name Ti. 
- - FstT1Name: absolute or relative path to the Turbine FST file - """ - - FstT1Full = os.path.abspath(FstT1Name).replace('\\','/') - FstDir = os.path.dirname(FstT1Full) - - fst=weio.read(FstT1Name) - SrvT1Name = fst['ServoFile'].strip('"') - SrvT1Full = os.path.join(FstDir, SrvT1Name).replace('\\','/') - SrvDir = os.path.dirname(SrvT1Full) - SrvT1RelFst = os.path.relpath(SrvT1Full,FstDir) - if os.path.exists(SrvT1Full): - srv=weio.read(SrvT1Full) - DLLT1Name = srv['DLL_FileName'].strip('"') - DLLT1Full = os.path.join(SrvDir, DLLT1Name) - if os.path.exists(DLLT1Full): - servo=True - else: - print('[Info] DLL file not found, not copying servo and dll files ({})'.format(DLLT1Full)) - servo=False - else: - print('[Info] ServoDyn file not found, not copying servo and dll files ({})'.format(SrvT1Full)) - servo=False - - #print(FstDir) - #print(FstT1Full) - #print(SrvT1Name) - #print(SrvT1Full) - #print(SrvT1RelFst) - - for i in np.arange(2,nWT+1): - FstName = insertTN(FstT1Name,i,nWT) - if servo: - # TODO handle the case where T1 not present - SrvName = insertTN(SrvT1Name,i,nWT) - DLLName = insertTN(DLLT1Name,i,nWT) - DLLFullName = os.path.join(SrvDir, DLLName) - - print('') - print('FstName: ',FstName) - if servo: - print('SrvName: ',SrvName) - print('DLLName: ',DLLName) - print('DLLFull: ',DLLFullName) - - # Changing main file - if servo: - fst['ServoFile']='"'+SrvName+'"' - fst.write(FstName) - if servo: - # Changing servo file - srv['DLL_FileName']='"'+DLLName+'"' - srv.write(SrvName) - # Copying dll - forceCopyFile(DLLT1Full, DLLFullName) - - - -def rectangularLayoutSubDomains(D,Lx,Ly): - """ Retuns position of turbines in a rectangular layout - TODO, unfinished function parameters - """ - # --- Parameters - D = 112 # turbine diameter [m] - Lx = 3840 # x dimension of precusor - Ly = 3840 # y dimension of precusor - Height = 0 # Height above ground, likely 0 [m] - nDomains_x = 2 # number of domains in x - nDomains_y = 2 # number of domains in y - # --- 36 WT - nx = 3 # number of turbines to be placed along x in one precursor domain - ny = 3 # number of turbines to be placed along y in one precursor domain - StartX = 1/2 # How close do we start from the x boundary - StartY = 1/2 # How close do we start from the y boundary - # --- Derived parameters - Lx_Domain = Lx * nDomains_x # Full domain size - Ly_Domain = Ly * nDomains_y - DeltaX = Lx / (nx) # Turbine spacing - DeltaY = Ly / (ny) - xWT = np.arange(DeltaX*StartX,Lx_Domain,DeltaX) # Turbine positions - yWT = np.arange(DeltaY*StartY,Ly_Domain,DeltaY) - - print('Full domain size [D] : {:.2f} x {:.2f} '.format(Lx_Domain/D, Ly_Domain/D)) - print('Turbine spacing [D] : {:.2f} x {:.2f} '.format(DeltaX/D,DeltaX/D)) - print('Number of turbines : {:d} x {:d} = {:d}'.format(len(xWT),len(yWT),len(xWT)*len(yWT))) - - XWT,YWT=np.meshgrid(xWT,yWT) - ZWT=XWT*0+Height - - # --- Export coordinates only - M=np.column_stack((XWT.ravel(),YWT.ravel(),ZWT.ravel())) - np.savetxt('Farm_Coordinates.csv', M, delimiter=',',header='X_[m], Y_[m], Z_[m]') - print(M) - - return XWT, YWT, ZWT - -def fastFarmTurbSimExtent(TurbSimFile, HubHeight, D, xWT, yWT, Cmeander=1.9, Chord_max=3, extent_X=1.2, extent_Y=1.2): - """ - Determines "Ambient Wind" box parametesr for FastFarm, based on a TurbSimFile ('bts') - """ - # --- TurbSim data - ts = weio.read(TurbSimFile) - iy,iz = ts.closestPoint(y=0,z=HubHeight) - meanU = ts['u'][0,:,iy,iz].mean() - dY_High = ts['y'][1]-ts['y'][0] - dZ_High = ts['z'][1]-ts['z'][0] - Z0_Low = ts['z'][0] - Z0_High = ts['z'][0] # we start at lowest 
to include tower - Width = ts['y'][-1]-ts['y'][0] - Height = ts['z'][-1]-ts['z'][0] - dT_High = ts['dt'] - effSimLength = ts['t'][-1]-ts['t'][0] + Width/meanU - - # Desired resolution, rule of thumbs - dX_High_desired = Chord_max - dX_Low_desired = Cmeander*D*meanU/150.0 - dt_des = Cmeander*D/(10.0*meanU) - - # --- High domain - ZMax_High = HubHeight+extent_Y*D/2.0 - # high-box extent in x and y [D] - Xdist_High = extent_X*D - Ydist_High = extent_Y*D - Zdist_High = ZMax_High-Z0_High # we include the tower - X0_rel = Xdist_High/2.0 - Y0_rel = Ydist_High/2.0 - Length = effSimLength*meanU - nx = int(round(effSimLength/dT_High)) - dx_TS = Length/(nx-1) - dX_High = round(dX_High_desired/dx_TS)*dx_TS - - nX_High = int(round(Xdist_High/dX_High)+1) - nY_High = int(round(Ydist_High/dY_High)+1) - nZ_High = int(round(Zdist_High/dZ_High)+1) - - # --- High extent per turbine - nTurbs = len(xWT) - X0_des = np.asarray(xWT)-X0_rel - Y0_des = np.asarray(yWT)-Y0_rel - X0_High = np.around(np.round(X0_des/dX_High)*dX_High,3) - Y0_High = np.around(np.round(Y0_des/dY_High)*dY_High,3) - - # --- Low domain - dT_Low = round(dt_des/dT_High)*dT_High - dx_des = dX_Low_desired - dy_des = dX_Low_desired - dz_des = dX_Low_desired - X0_Low = min(xWT)-2*D - Y0_Low = -Width/2 - dX_Low = round(dx_des/dX_High)*dX_High - dY_Low = round(dy_des/dY_High)*dY_High - dZ_Low = round(dz_des/dZ_High)*dZ_High - Xdist = max(xWT)+8.0*D-X0_Low # Maximum extent - Ydist = Width - Zdist = Height - - nX_Low = int(Xdist/dX_Low)+1; - nY_Low = int(Ydist/dY_Low)+1; - nZ_Low = int(Zdist/dZ_Low)+1; - - if (nX_Low*dX_Low>Xdist): - nX_Low=nX_Low-1 - if (nY_Low*dY_Low>Ydist): - nY_Low=nY_Low-1 - if (nZ_Low*dZ_Low>Zdist): - nZ_Low=nZ_Low-1 - - d = dict() - d['DT'] = np.around(dT_Low ,3) - d['DT_High'] = np.around(dT_High,3) - d['NX_Low'] = int(nX_Low) - d['NY_Low'] = int(nY_Low) - d['NZ_Low'] = int(nZ_Low) - d['X0_Low'] = np.around(X0_Low,3) - d['Y0_Low'] = np.around(Y0_Low,3) - d['Z0_Low'] = np.around(Z0_Low,3) - d['dX_Low'] = np.around(dX_Low,3) - d['dY_Low'] = np.around(dY_Low,3) - d['dZ_Low'] = np.around(dZ_Low,3) - d['NX_High'] = int(nX_High) - d['NY_High'] = int(nY_High) - d['NZ_High'] = int(nZ_High) - # --- High extent info for turbine outputs - d['dX_High'] = np.around(dX_High,3) - d['dY_High'] = np.around(dY_High,3) - d['dZ_High'] = np.around(dZ_High,3) - d['X0_High'] = X0_High - d['Y0_High'] = Y0_High - d['Z0_High'] = np.around(Z0_High,3) - - return d - -def writeFastFarm(outputFile, templateFile, xWT, yWT, zWT, FFTS=None, OutListT1=None): - """ Write FastFarm input file based on a template, a TurbSimFile and the Layout - - outputFile: .fstf file to be written - templateFile: .fstf file that will be used to generate the output_file - XWT,YWT,ZWT: positions of turbines - FFTS: FastFarm TurbSim parameters as returned by fastFarmTurbSimExtent - """ - # --- Read template fast farm file - fst=weio.FASTInputFile(templateFile) - # --- Replace box extent values - if FFTS is not None: - fst['Mod_AmbWind'] = 2 - for k in ['DT', 'DT_High', 'NX_Low', 'NY_Low', 'NZ_Low', 'X0_Low', 'Y0_Low', 'Z0_Low', 'dX_Low', 'dY_Low', 'dZ_Low', 'NX_High', 'NY_High', 'NZ_High']: - if isinstance(FFTS[k],int): - fst[k] = FFTS[k] - else: - fst[k] = np.around(FFTS[k],3) - fst['WrDisDT'] = FFTS['DT'] - - # --- Set turbine names, position, and box extent - nWT = len(xWT) - fst['NumTurbines'] = nWT - if FFTS is not None: - nCol= 10 - else: - nCol = 4 - ref_path = fst['WindTurbines'][0,3] - WT = np.array(['']*nWT*nCol,dtype='object').reshape((nWT,nCol)) - for iWT,(x,y,z) in 
enumerate(zip(xWT,yWT,zWT)): - WT[iWT,0]=x - WT[iWT,1]=y - WT[iWT,2]=z - WT[iWT,3]=insertTN(ref_path,iWT+1,nWT) - if FFTS is not None: - WT[iWT,4]=FFTS['X0_High'][iWT] - WT[iWT,5]=FFTS['Y0_High'][iWT] - WT[iWT,6]=FFTS['Z0_High'] - WT[iWT,7]=FFTS['dX_High'] - WT[iWT,8]=FFTS['dY_High'] - WT[iWT,9]=FFTS['dZ_High'] - fst['WindTurbines']=WT - - fst.write(outputFile) - if OutListT1 is not None: - setFastFarmOutputs(outputFile, OutListT1) - -def setFastFarmOutputs(fastFarmFile, OutListT1): - """ Duplicate the output list, by replacing "T1" with T1->Tn """ - fst = weio.read(fastFarmFile) - nWTOut = min(fst['NumTurbines'],9) # Limited to 9 turbines - OutList=[''] - for s in OutListT1: - s=s.strip('"') - if s.find('T1'): - OutList+=['"'+s.replace('T1','T{:d}'.format(iWT+1))+'"' for iWT in np.arange(nWTOut) ] - else: - OutList+='"'+s+'"' - fst['OutList']=OutList - fst.write(fastFarmFile) - - -def plotFastFarmSetup(fastFarmFile): - """ """ - import matplotlib.pyplot as plt - fst=weio.FASTInputFile(fastFarmFile) - - fig = plt.figure(figsize=(13.5,10)) - ax = fig.add_subplot(111,aspect="equal") - - WT=fst['WindTurbines'] - x = WT[:,0].astype(float) - y = WT[:,1].astype(float) - - if fst['Mod_AmbWind'] == 2: - xmax_low = fst['X0_Low']+fst['DX_Low']*fst['NX_Low'] - ymax_low = fst['Y0_Low']+fst['DY_Low']*fst['NY_Low'] - # low-res box - ax.plot([fst['X0_Low'],xmax_low,xmax_low,fst['X0_Low'],fst['X0_Low']], - [fst['Y0_Low'],fst['Y0_Low'],ymax_low,ymax_low,fst['Y0_Low']],'--k',lw=2,label='Low') - X0_High = WT[:,4].astype(float) - Y0_High = WT[:,5].astype(float) - dX_High = WT[:,7].astype(float)[0] - dY_High = WT[:,8].astype(float)[0] - nX_High = fst['NX_High'] - nY_High = fst['NY_High'] - # high-res boxes - for wt in range(len(x)): - xmax_high = X0_High[wt]+dX_High*nX_High - ymax_high = Y0_High[wt]+dY_High*nY_High - ax.plot([X0_High[wt],xmax_high,xmax_high,X0_High[wt],X0_High[wt]], - [Y0_High[wt],Y0_High[wt],ymax_high,ymax_high,Y0_High[wt]], - '-', - label="HighT{0}".format(wt+1)) - ax.plot(x[wt],y[wt],'x',ms=8,mew=2,label="WT{0}".format(wt+1)) - else: - for wt in range(len(x)): - ax.plot(x[wt],y[wt],'x',ms=8,mew=2,label="WT{0}".format(wt+1)) - # - plt.legend(bbox_to_anchor=(1.05,1.015),frameon=False) - ax.set_xlabel("x-location [m]") - ax.set_ylabel("y-location [m]") - fig.tight_layout - # fig.savefig('FFarmLayout.pdf',bbox_to_inches='tight',dpi=500) - -# --------------------------------------------------------------------------------} -# --- Tools for postpro -# --------------------------------------------------------------------------------{ - -def spanwiseColFastFarm(Cols, nWT=9, nD=9): - """ Return column info, available columns and indices that contain AD spanwise data""" - FFSpanMap=dict() - for i in np.arange(nWT): - FFSpanMap['^CtT{:d}N(\d*)_\[-\]'.format(i+1)]='CtT{:d}_[-]'.format(i+1) - for i in np.arange(nWT): - for k in np.arange(nD): - FFSpanMap['^WkDfVxT{:d}N(\d*)D{:d}_\[m/s\]'.format(i+1,k+1) ]='WkDfVxT{:d}D{:d}_[m/s]'.format(i+1, k+1) - for i in np.arange(nWT): - for k in np.arange(nD): - FFSpanMap['^WkDfVrT{:d}N(\d*)D{:d}_\[m/s\]'.format(i+1,k+1) ]='WkDfVrT{:d}D{:d}_[m/s]'.format(i+1, k+1) - - return fastlib.find_matching_columns(Cols, FFSpanMap) - -def diameterwiseColFastFarm(Cols, nWT=9): - """ Return column info, available columns and indices that contain AD spanwise data""" - FFDiamMap=dict() - for i in np.arange(nWT): - for x in ['X','Y','Z']: - FFDiamMap['^WkAxs{}T{:d}D(\d*)_\[-\]'.format(x,i+1)] ='WkAxs{}T{:d}_[-]'.format(x,i+1) - for i in np.arange(nWT): - for x in ['X','Y','Z']: - 
FFDiamMap['^WkPos{}T{:d}D(\d*)_\[m\]'.format(x,i+1)] ='WkPos{}T{:d}_[m]'.format(x,i+1) - for i in np.arange(nWT): - for x in ['X','Y','Z']: - FFDiamMap['^WkVel{}T{:d}D(\d*)_\[m/s\]'.format(x,i+1)] ='WkVel{}T{:d}_[m/s]'.format(x,i+1) - for i in np.arange(nWT): - for x in ['X','Y','Z']: - FFDiamMap['^WkDiam{}T{:d}D(\d*)_\[m\]'.format(x,i+1)] ='WkDiam{}T{:d}_[m]'.format(x,i+1) - return fastlib.find_matching_columns(Cols, FFDiamMap) - -def SensorsFARMRadial(nWT=3,nD=10,nR=30,signals=None): - """ Returns a list of FASTFarm sensors that are used for the radial distribution - of quantities (e.g. Ct, Wake Deficits). - If `signals` is provided, the output is the list of sensors within the list `signals`. - """ - WT = np.arange(nWT) - r = np.arange(nR) - D = np.arange(nD) - sens=[] - sens+=['CtT{:d}N{:02d}_[-]'.format(i+1,j+1) for i in WT for j in r] - sens+=['WkDfVxT{:d}N{:02d}D{:d}_[m/s]'.format(i+1,j+1,k+1) for i in WT for j in r for k in D] - sens+=['WkDfVrT{:d}N{:02d}D{:d}_[m/s]'.format(i+1,j+1,k+1) for i in WT for j in r for k in D] - if signals is not None: - sens = [c for c in sens if c in signals] - return sens - -def SensorsFARMDiam(nWT,nD): - """ Returns a list of FASTFarm sensors that contain quantities at different downstream diameters - (e.g. WkAxs, WkPos, WkVel, WkDiam) - If `signals` is provided, the output is the list of sensors within the list `signals`. - """ - WT = np.arange(nWT) - D = np.arange(nD) - XYZ = ['X','Y','Z'] - sens=[] - sens+=['WkAxs{}T{:d}D{:d}_[-]'.format(x,i+1,j+1) for x in XYZ for i in WT for j in D] - sens+=['WkPos{}T{:d}D{:d}_[m]'.format(x,i+1,j+1) for x in XYZ for i in WT for j in D] - sens+=['WkVel{}T{:d}D{:d}_[m/s]'.format(x,i+1,j+1) for x in XYZ for i in WT for j in D] - sens+=['WkDiam{}T{:d}D{:d}_[m]'.format(x,i+1,j+1) for x in XYZ for i in WT for j in D] - if signals is not None: - sens = [c for c in sens if c in signals] - return sens - - -def extractFFRadialData(fastfarm_out,fastfarm_input,avgMethod='constantwindow',avgParam=30,D=1,df=None): - # LEGACY - return spanwisePostProFF(fastfarm_input,avgMethod=avgMethod,avgParam=avgParam,D=D,df=df,fastfarm_out=fastfarm_out) - - -def spanwisePostProFF(fastfarm_input,avgMethod='constantwindow',avgParam=30,D=1,df=None,fastfarm_out=None): - """ - Opens a FASTFarm output file, extract the radial data, average them and returns spanwise data - - D: diameter TODO, extract it from the main file - - See faslibt.averageDF for `avgMethod` and `avgParam`. - """ - # --- Opening ouputfile - if df is None: - df=weio.read(fastfarm_out).toDataFrame() - - # --- Opening input file and extracting inportant variables - if fastfarm_input is None: - # We don't have an input file, guess numbers of turbine, diameters, Nodes... 
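    # (Illustration, hypothetical column names) With outputs such as 'CtT2N03_[-]' or
    # 'WkDfVxT1N05D9_[m/s]', the patterns below capture the turbine ('T...'), node ('N...')
    # and diameter ('D...') indices; the maximum of each captured group is used as an
    # estimate of nWT, nD and nr when no FAST.Farm input file is provided.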
- cols, sIdx = fastlib.find_matching_pattern(df.columns.values, 'T(\d+)') - nWT = np.array(sIdx).astype(int).max() - cols, sIdx = fastlib.find_matching_pattern(df.columns.values, 'D(\d+)') - nD = np.array(sIdx).astype(int).max() - cols, sIdx = fastlib.find_matching_pattern(df.columns.values, 'N(\d+)') - nr = np.array(sIdx).astype(int).max() - vr=None - vD=None - D=0 - else: - main=weio.FASTInputFile(fastfarm_input) - iOut = main['OutRadii'] - dr = main['dr'] # Radial increment of radial finite-difference grid (m) - OutDist = main['OutDist'] # List of downstream distances for wake output for an individual rotor - WT = main['WindTurbines'] - nWT = len(WT) - vr = dr*np.array(iOut) - vD = np.array(OutDist) - nr=len(iOut) - nD=len(vD) - - - # --- Extracting time series of radial data only - colRadial = SensorsFARMRadial(nWT=nWT,nD=nD,nR=nr,signals=df.columns.values) - colRadial=['Time_[s]']+colRadial - dfRadialTime = df[colRadial] # TODO try to do some magic with it, display it with a slider - - # --- Averaging data - dfAvg = fastlib.averageDF(df,avgMethod=avgMethod,avgParam=avgParam) - - # --- Extract radial data - ColsInfo, nrMax = spanwiseColFastFarm(df.columns.values, nWT=nWT, nD=nD) - dfRad = fastlib.extract_spanwise_data(ColsInfo, nrMax, df=None, ts=dfAvg.iloc[0]) - #dfRad = fastlib.insert_radial_columns(dfRad, vr) - if vr is None: - dfRad.insert(0, 'i_[#]', np.arange(nrMax)+1) - else: - dfRad.insert(0, 'r_[m]', vr[:nrMax]) - dfRad['i/n_[-]']=np.arange(nrMax)/nrMax - - # --- Extract downstream data - ColsInfo, nDMax = diameterwiseColFastFarm(df.columns.values, nWT=nWT) - dfDiam = fastlib.extract_spanwise_data(ColsInfo, nDMax, df=None, ts=dfAvg.iloc[0]) - #dfDiam = fastlib.insert_radial_columns(dfDiam) - if vD is None: - dfDiam.insert(0, 'i_[#]', np.arange(nDMax)+1) - else: - dfDiam.insert(0, 'x_[m]', vD[:nDMax]) - dfDiam['i/n_[-]'] = np.arange(nDMax)/nDMax - return dfRad, dfRadialTime, dfDiam - +import os +import glob +import numpy as np +import pandas as pd +try: + import weio +except: + raise Exception('Python package `weio` not found, please install it from https://github.com/ebranlard/weio ') + +from . import fastlib + +# --------------------------------------------------------------------------------} +# --- Small helper functions +# --------------------------------------------------------------------------------{ +def insertTN(s,i,nWT=1000): + """ insert turbine number in name """ + if nWT<10: + fmt='{:d}' + elif nWT<100: + fmt='{:02d}' + else: + fmt='{:03d}' + if s.find('T1')>=0: + s=s.replace('T1','T'+fmt.format(i)) + else: + sp=os.path.splitext(s) + s=sp[0]+'_T'+fmt.format(i)+sp[1] + return s +def forceCopyFile (sfile, dfile): + # ---- Handling error due to wrong mod + if os.path.isfile(dfile): + if not os.access(dfile, os.W_OK): + os.chmod(dfile, stat.S_IWUSR) + #print(sfile, ' > ', dfile) + shutil.copy2(sfile, dfile) + +# --------------------------------------------------------------------------------} +# --- Tools to create fast farm simulations +# --------------------------------------------------------------------------------{ +def writeFSTandDLL(FstT1Name, nWT): + """ + Write FST files for each turbine, with different ServoDyn files and DLL + FST files, ServoFiles, and DLL files will be written next to their turbine 1 + files, with name Ti. 
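As an aside, the turbine numbering applied by `insertTN` above behaves as follows (file names are hypothetical; zero padding follows the total turbine count `nWT`):

    insertTN('ServoDyn_T1.dat', 3, nWT=12)   # -> 'ServoDyn_T03.dat'  ('T1' is replaced)
    insertTN('MyTurbine.fst',   5, nWT=9)    # -> 'MyTurbine_T5.fst'  (suffix added before the extension)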
+ + FstT1Name: absolute or relative path to the Turbine FST file + """ + + FstT1Full = os.path.abspath(FstT1Name).replace('\\','/') + FstDir = os.path.dirname(FstT1Full) + + fst=weio.read(FstT1Name) + SrvT1Name = fst['ServoFile'].strip('"') + SrvT1Full = os.path.join(FstDir, SrvT1Name).replace('\\','/') + SrvDir = os.path.dirname(SrvT1Full) + SrvT1RelFst = os.path.relpath(SrvT1Full,FstDir) + if os.path.exists(SrvT1Full): + srv=weio.read(SrvT1Full) + DLLT1Name = srv['DLL_FileName'].strip('"') + DLLT1Full = os.path.join(SrvDir, DLLT1Name) + if os.path.exists(DLLT1Full): + servo=True + else: + print('[Info] DLL file not found, not copying servo and dll files ({})'.format(DLLT1Full)) + servo=False + else: + print('[Info] ServoDyn file not found, not copying servo and dll files ({})'.format(SrvT1Full)) + servo=False + + #print(FstDir) + #print(FstT1Full) + #print(SrvT1Name) + #print(SrvT1Full) + #print(SrvT1RelFst) + + for i in np.arange(2,nWT+1): + FstName = insertTN(FstT1Name,i,nWT) + if servo: + # TODO handle the case where T1 not present + SrvName = insertTN(SrvT1Name,i,nWT) + DLLName = insertTN(DLLT1Name,i,nWT) + DLLFullName = os.path.join(SrvDir, DLLName) + + print('') + print('FstName: ',FstName) + if servo: + print('SrvName: ',SrvName) + print('DLLName: ',DLLName) + print('DLLFull: ',DLLFullName) + + # Changing main file + if servo: + fst['ServoFile']='"'+SrvName+'"' + fst.write(FstName) + if servo: + # Changing servo file + srv['DLL_FileName']='"'+DLLName+'"' + srv.write(SrvName) + # Copying dll + forceCopyFile(DLLT1Full, DLLFullName) + + + +def rectangularLayoutSubDomains(D,Lx,Ly): + """ Retuns position of turbines in a rectangular layout + TODO, unfinished function parameters + """ + # --- Parameters + D = 112 # turbine diameter [m] + Lx = 3840 # x dimension of precusor + Ly = 3840 # y dimension of precusor + Height = 0 # Height above ground, likely 0 [m] + nDomains_x = 2 # number of domains in x + nDomains_y = 2 # number of domains in y + # --- 36 WT + nx = 3 # number of turbines to be placed along x in one precursor domain + ny = 3 # number of turbines to be placed along y in one precursor domain + StartX = 1/2 # How close do we start from the x boundary + StartY = 1/2 # How close do we start from the y boundary + # --- Derived parameters + Lx_Domain = Lx * nDomains_x # Full domain size + Ly_Domain = Ly * nDomains_y + DeltaX = Lx / (nx) # Turbine spacing + DeltaY = Ly / (ny) + xWT = np.arange(DeltaX*StartX,Lx_Domain,DeltaX) # Turbine positions + yWT = np.arange(DeltaY*StartY,Ly_Domain,DeltaY) + + print('Full domain size [D] : {:.2f} x {:.2f} '.format(Lx_Domain/D, Ly_Domain/D)) + print('Turbine spacing [D] : {:.2f} x {:.2f} '.format(DeltaX/D,DeltaX/D)) + print('Number of turbines : {:d} x {:d} = {:d}'.format(len(xWT),len(yWT),len(xWT)*len(yWT))) + + XWT,YWT=np.meshgrid(xWT,yWT) + ZWT=XWT*0+Height + + # --- Export coordinates only + M=np.column_stack((XWT.ravel(),YWT.ravel(),ZWT.ravel())) + np.savetxt('Farm_Coordinates.csv', M, delimiter=',',header='X_[m], Y_[m], Z_[m]') + print(M) + + return XWT, YWT, ZWT + +def fastFarmTurbSimExtent(TurbSimFile, HubHeight, D, xWT, yWT, Cmeander=1.9, Chord_max=3, extent_X=1.2, extent_Y=1.2): + """ + Determines "Ambient Wind" box parametesr for FastFarm, based on a TurbSimFile ('bts') + """ + # --- TurbSim data + ts = weio.read(TurbSimFile) + #iy,iz = ts.closestPoint(y=0,z=HubHeight) + #iy,iz = ts.closestPoint(y=0,z=HubHeight) + zMid, uMid = ts.midValues() + #print('uMid',uMid) + #meanU = ts['u'][0,:,iy,iz].mean() + meanU = uMid + dY_High = 
ts['y'][1]-ts['y'][0] + dZ_High = ts['z'][1]-ts['z'][0] + Z0_Low = ts['z'][0] + Z0_High = ts['z'][0] # we start at lowest to include tower + Width = ts['y'][-1]-ts['y'][0] + Height = ts['z'][-1]-ts['z'][0] + dT_High = ts['dt'] + #effSimLength = ts['t'][-1]-ts['t'][0] + Width/meanU + effSimLength = ts['t'][-1]-ts['t'][0] + + # Desired resolution, rule of thumbs + dX_High_desired = Chord_max + dX_Low_desired = Cmeander*D*meanU/150.0 + dt_des = Cmeander*D/(10.0*meanU) + + # --- High domain + ZMax_High = HubHeight+extent_Y*D/2.0 + # high-box extent in x and y [D] + Xdist_High = extent_X*D + Ydist_High = extent_Y*D + Zdist_High = ZMax_High-Z0_High # we include the tower + X0_rel = Xdist_High/2.0 + Y0_rel = Ydist_High/2.0 + Length = effSimLength*meanU + nx = int(round(effSimLength/dT_High)) + dx_TS = Length/(nx-1) + #print('dx_TS',dx_TS) + dX_High = round(dX_High_desired/dx_TS)*dx_TS + #print('dX_High_desired',dX_High_desired, dX_High) + + nX_High = int(round(Xdist_High/dX_High)+1) + nY_High = int(round(Ydist_High/dY_High)+1) + nZ_High = int(round(Zdist_High/dZ_High)+1) + + # --- High extent per turbine + nTurbs = len(xWT) + X0_des = np.asarray(xWT)-X0_rel + Y0_des = np.asarray(yWT)-Y0_rel + X0_High = np.around(np.round(X0_des/dX_High)*dX_High,3) + Y0_High = np.around(np.round(Y0_des/dY_High)*dY_High,3) + + # --- Low domain + dT_Low = round(dt_des/dT_High)*dT_High + dx_des = dX_Low_desired + dy_des = dX_Low_desired + dz_des = dX_Low_desired + X0_Low = round( (min(xWT)-2*D)/dX_High) *dX_High + Y0_Low = round( -Width/2 /dY_High) *dY_High + dX_Low = round( dx_des /dX_High)*dX_High + dY_Low = round( dy_des /dY_High)*dY_High + dZ_Low = round( dz_des /dZ_High)*dZ_High + Xdist = max(xWT)+8.0*D-X0_Low # Maximum extent + Ydist = Width + Zdist = Height + #print('dX_Low',dX_Low, dX_Low/dx_TS, dX_High/dx_TS) + + nX_Low = int(Xdist/dX_Low)+1; + nY_Low = int(Ydist/dY_Low)+1; + nZ_Low = int(Zdist/dZ_Low)+1; + + if (nX_Low*dX_Low>Xdist): + nX_Low=nX_Low-1 + if (nY_Low*dY_Low>Ydist): + nY_Low=nY_Low-1 + if (nZ_Low*dZ_Low>Zdist): + nZ_Low=nZ_Low-1 + + d = dict() + d['DT'] = np.around(dT_Low ,3) + d['DT_High'] = np.around(dT_High,3) + d['NX_Low'] = int(nX_Low) + d['NY_Low'] = int(nY_Low) + d['NZ_Low'] = int(nZ_Low) + d['X0_Low'] = np.around(X0_Low,3) + d['Y0_Low'] = np.around(Y0_Low,3) + d['Z0_Low'] = np.around(Z0_Low,3) + d['dX_Low'] = np.around(dX_Low,3) + d['dY_Low'] = np.around(dY_Low,3) + d['dZ_Low'] = np.around(dZ_Low,3) + d['NX_High'] = int(nX_High) + d['NY_High'] = int(nY_High) + d['NZ_High'] = int(nZ_High) + # --- High extent info for turbine outputs + d['dX_High'] = np.around(dX_High,3) + d['dY_High'] = np.around(dY_High,3) + d['dZ_High'] = np.around(dZ_High,3) + d['X0_High'] = X0_High + d['Y0_High'] = Y0_High + d['Z0_High'] = np.around(Z0_High,3) + + return d + +def writeFastFarm(outputFile, templateFile, xWT, yWT, zWT, FFTS=None, OutListT1=None): + """ Write FastFarm input file based on a template, a TurbSimFile and the Layout + + outputFile: .fstf file to be written + templateFile: .fstf file that will be used to generate the output_file + XWT,YWT,ZWT: positions of turbines + FFTS: FastFarm TurbSim parameters as returned by fastFarmTurbSimExtent + """ + # --- Read template fast farm file + fst=weio.FASTInputFile(templateFile) + # --- Replace box extent values + if FFTS is not None: + fst['Mod_AmbWind'] = 2 + for k in ['DT', 'DT_High', 'NX_Low', 'NY_Low', 'NZ_Low', 'X0_Low', 'Y0_Low', 'Z0_Low', 'dX_Low', 'dY_Low', 'dZ_Low', 'NX_High', 'NY_High', 'NZ_High']: + if isinstance(FFTS[k],int): + fst[k] = 
FFTS[k]
+            else:
+                fst[k] = np.around(FFTS[k],3)
+        fst['WrDisDT'] = FFTS['DT']
+
+    # --- Set turbine names, position, and box extent
+    nWT = len(xWT)
+    fst['NumTurbines'] = nWT
+    if FFTS is not None:
+        nCol= 10
+    else:
+        nCol = 4
+    ref_path = fst['WindTurbines'][0,3]
+    WT = np.array(['']*nWT*nCol,dtype='object').reshape((nWT,nCol))
+    for iWT,(x,y,z) in enumerate(zip(xWT,yWT,zWT)):
+        WT[iWT,0]=x
+        WT[iWT,1]=y
+        WT[iWT,2]=z
+        WT[iWT,3]=insertTN(ref_path,iWT+1,nWT)
+        if FFTS is not None:
+            WT[iWT,4]=FFTS['X0_High'][iWT]
+            WT[iWT,5]=FFTS['Y0_High'][iWT]
+            WT[iWT,6]=FFTS['Z0_High']
+            WT[iWT,7]=FFTS['dX_High']
+            WT[iWT,8]=FFTS['dY_High']
+            WT[iWT,9]=FFTS['dZ_High']
+    fst['WindTurbines']=WT
+
+    fst.write(outputFile)
+    if OutListT1 is not None:
+        setFastFarmOutputs(outputFile, OutListT1)
+
+def setFastFarmOutputs(fastFarmFile, OutListT1):
+    """ Duplicate the turbine 1 output list for all turbines, replacing "T1" with "T1".."Tn" """
+    fst = weio.read(fastFarmFile)
+    nWTOut = min(fst['NumTurbines'],9) # Limited to 9 turbines
+    OutList=['']
+    for s in OutListT1:
+        s=s.strip('"')
+        if s.find('T1')>=0:
+            OutList+=['"'+s.replace('T1','T{:d}'.format(iWT+1))+'"' for iWT in np.arange(nWTOut) ]
+        else:
+            OutList+=['"'+s+'"']
+    fst['OutList']=OutList
+    fst.write(fastFarmFile)
+
+
+def plotFastFarmSetup(fastFarmFile):
+    """ Plot the turbine layout and the low/high-resolution wind box extents of a FAST.Farm input file """
+    import matplotlib.pyplot as plt
+    fst=weio.FASTInputFile(fastFarmFile)
+
+    fig = plt.figure(figsize=(13.5,10))
+    ax = fig.add_subplot(111,aspect="equal")
+
+    WT=fst['WindTurbines']
+    x = WT[:,0].astype(float)
+    y = WT[:,1].astype(float)
+
+    if fst['Mod_AmbWind'] == 2:
+        xmax_low = fst['X0_Low']+fst['DX_Low']*fst['NX_Low']
+        ymax_low = fst['Y0_Low']+fst['DY_Low']*fst['NY_Low']
+        # low-res box
+        ax.plot([fst['X0_Low'],xmax_low,xmax_low,fst['X0_Low'],fst['X0_Low']],
+                [fst['Y0_Low'],fst['Y0_Low'],ymax_low,ymax_low,fst['Y0_Low']],'--k',lw=2,label='Low')
+        X0_High = WT[:,4].astype(float)
+        Y0_High = WT[:,5].astype(float)
+        dX_High = WT[:,7].astype(float)[0]
+        dY_High = WT[:,8].astype(float)[0]
+        nX_High = fst['NX_High']
+        nY_High = fst['NY_High']
+        # high-res boxes
+        for wt in range(len(x)):
+            xmax_high = X0_High[wt]+dX_High*nX_High
+            ymax_high = Y0_High[wt]+dY_High*nY_High
+            ax.plot([X0_High[wt],xmax_high,xmax_high,X0_High[wt],X0_High[wt]],
+                    [Y0_High[wt],Y0_High[wt],ymax_high,ymax_high,Y0_High[wt]],
+                    '-',
+                    label="HighT{0}".format(wt+1))
+            ax.plot(x[wt],y[wt],'x',ms=8,mew=2,label="WT{0}".format(wt+1))
+    else:
+        for wt in range(len(x)):
+            ax.plot(x[wt],y[wt],'x',ms=8,mew=2,label="WT{0}".format(wt+1))
+    #
+    plt.legend(bbox_to_anchor=(1.05,1.015),frameon=False)
+    ax.set_xlabel("x-location [m]")
+    ax.set_ylabel("y-location [m]")
+    fig.tight_layout()
+    # fig.savefig('FFarmLayout.pdf',bbox_to_inches='tight',dpi=500)
+
+# --------------------------------------------------------------------------------}
+# --- Tools for postpro
+# --------------------------------------------------------------------------------{
+
+def spanwiseColFastFarm(Cols, nWT=9, nD=9):
+    """ Return column info, available columns and indices that contain FAST.Farm radial (spanwise) data"""
+    FFSpanMap=dict()
+    for i in np.arange(nWT):
+        FFSpanMap[r'^CtT{:d}N(\d*)_\[-\]'.format(i+1)]='CtT{:d}_[-]'.format(i+1)
+    for i in np.arange(nWT):
+        for k in np.arange(nD):
+            FFSpanMap[r'^WkDfVxT{:d}N(\d*)D{:d}_\[m/s\]'.format(i+1,k+1) ]='WkDfVxT{:d}D{:d}_[m/s]'.format(i+1, k+1)
+    for i in np.arange(nWT):
+        for k in np.arange(nD):
+            FFSpanMap[r'^WkDfVrT{:d}N(\d*)D{:d}_\[m/s\]'.format(i+1,k+1) ]='WkDfVrT{:d}D{:d}_[m/s]'.format(i+1, k+1)
+
+    return fastlib.find_matching_columns(Cols, FFSpanMap)
+
+def diameterwiseColFastFarm(Cols, nWT=9):
+    """ Return column info, available columns and indices that contain FAST.Farm downstream (diameter-wise) wake data"""
+    FFDiamMap=dict()
+    for i in np.arange(nWT):
+        for x in ['X','Y','Z']:
+            FFDiamMap[r'^WkAxs{}T{:d}D(\d*)_\[-\]'.format(x,i+1)]  ='WkAxs{}T{:d}_[-]'.format(x,i+1)
+    for i in np.arange(nWT):
+        for x in ['X','Y','Z']:
+            FFDiamMap[r'^WkPos{}T{:d}D(\d*)_\[m\]'.format(x,i+1)]  ='WkPos{}T{:d}_[m]'.format(x,i+1)
+    for i in np.arange(nWT):
+        for x in ['X','Y','Z']:
+            FFDiamMap[r'^WkVel{}T{:d}D(\d*)_\[m/s\]'.format(x,i+1)]='WkVel{}T{:d}_[m/s]'.format(x,i+1)
+    for i in np.arange(nWT):
+        for x in ['X','Y','Z']:
+            FFDiamMap[r'^WkDiam{}T{:d}D(\d*)_\[m\]'.format(x,i+1)] ='WkDiam{}T{:d}_[m]'.format(x,i+1)
+    return fastlib.find_matching_columns(Cols, FFDiamMap)
+
+def SensorsFARMRadial(nWT=3,nD=10,nR=30,signals=None):
+    """ Returns a list of FAST.Farm sensors that are used for the radial distribution
+    of quantities (e.g. Ct, wake deficits).
+    If `signals` is provided, the output is restricted to the sensors present in `signals`.
+    """
+    WT = np.arange(nWT)
+    r  = np.arange(nR)
+    D  = np.arange(nD)
+    sens=[]
+    sens+=['CtT{:d}N{:02d}_[-]'.format(i+1,j+1) for i in WT for j in r]
+    sens+=['WkDfVxT{:d}N{:02d}D{:d}_[m/s]'.format(i+1,j+1,k+1) for i in WT for j in r for k in D]
+    sens+=['WkDfVrT{:d}N{:02d}D{:d}_[m/s]'.format(i+1,j+1,k+1) for i in WT for j in r for k in D]
+    if signals is not None:
+        sens = [c for c in sens if c in signals]
+    return sens
+
+def SensorsFARMDiam(nWT,nD,signals=None):
+    """ Returns a list of FAST.Farm sensors that contain quantities at different downstream diameters
+    (e.g. WkAxs, WkPos, WkVel, WkDiam).
+    If `signals` is provided, the output is restricted to the sensors present in `signals`.
+    """
+    WT  = np.arange(nWT)
+    D   = np.arange(nD)
+    XYZ = ['X','Y','Z']
+    sens=[]
+    sens+=['WkAxs{}T{:d}D{:d}_[-]'.format(x,i+1,j+1) for x in XYZ for i in WT for j in D]
+    sens+=['WkPos{}T{:d}D{:d}_[m]'.format(x,i+1,j+1) for x in XYZ for i in WT for j in D]
+    sens+=['WkVel{}T{:d}D{:d}_[m/s]'.format(x,i+1,j+1) for x in XYZ for i in WT for j in D]
+    sens+=['WkDiam{}T{:d}D{:d}_[m]'.format(x,i+1,j+1) for x in XYZ for i in WT for j in D]
+    if signals is not None:
+        sens = [c for c in sens if c in signals]
+    return sens
+
+
+def extractFFRadialData(fastfarm_out,fastfarm_input,avgMethod='constantwindow',avgParam=30,D=1,df=None):
+    # LEGACY
+    return spanwisePostProFF(fastfarm_input,avgMethod=avgMethod,avgParam=avgParam,D=D,df=df,fastfarm_out=fastfarm_out)
+
+
+def spanwisePostProFF(fastfarm_input,avgMethod='constantwindow',avgParam=30,D=1,df=None,fastfarm_out=None):
+    """
+    Opens a FAST.Farm output file, extracts the radial data, averages them, and returns spanwise dataframes
+
+    D: diameter TODO, extract it from the main file
+
+    See fastlib.averageDF for `avgMethod` and `avgParam`.
+    """
+    # --- Opening output file
+    if df is None:
+        df=weio.read(fastfarm_out).toDataFrame()
+
+    # --- Opening input file and extracting important variables
+    if fastfarm_input is None:
+        # We don't have an input file, guess the number of turbines, diameters and radial nodes...
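+        # Illustration (assumed FAST.Farm channel naming, e.g. 'CtT2N17_[-]' or 'WkDfVxT2N17D9_[m/s]'):
+        # the largest integer found after 'T', 'N' and 'D' across all column names is taken
+        # as the number of turbines, radial nodes and downstream diameters, respectively.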
+ cols, sIdx = fastlib.find_matching_pattern(df.columns.values, 'T(\d+)') + nWT = np.array(sIdx).astype(int).max() + cols, sIdx = fastlib.find_matching_pattern(df.columns.values, 'D(\d+)') + nD = np.array(sIdx).astype(int).max() + cols, sIdx = fastlib.find_matching_pattern(df.columns.values, 'N(\d+)') + nr = np.array(sIdx).astype(int).max() + vr=None + vD=None + D=0 + else: + main=weio.FASTInputFile(fastfarm_input) + iOut = main['OutRadii'] + dr = main['dr'] # Radial increment of radial finite-difference grid (m) + OutDist = main['OutDist'] # List of downstream distances for wake output for an individual rotor + WT = main['WindTurbines'] + nWT = len(WT) + vr = dr*np.array(iOut) + vD = np.array(OutDist) + nr=len(iOut) + nD=len(vD) + + + # --- Extracting time series of radial data only + colRadial = SensorsFARMRadial(nWT=nWT,nD=nD,nR=nr,signals=df.columns.values) + colRadial=['Time_[s]']+colRadial + dfRadialTime = df[colRadial] # TODO try to do some magic with it, display it with a slider + + # --- Averaging data + dfAvg = fastlib.averageDF(df,avgMethod=avgMethod,avgParam=avgParam) + + # --- Extract radial data + ColsInfo, nrMax = spanwiseColFastFarm(df.columns.values, nWT=nWT, nD=nD) + dfRad = fastlib.extract_spanwise_data(ColsInfo, nrMax, df=None, ts=dfAvg.iloc[0]) + #dfRad = fastlib.insert_radial_columns(dfRad, vr) + if vr is None: + dfRad.insert(0, 'i_[#]', np.arange(nrMax)+1) + else: + dfRad.insert(0, 'r_[m]', vr[:nrMax]) + dfRad['i/n_[-]']=np.arange(nrMax)/nrMax + + # --- Extract downstream data + ColsInfo, nDMax = diameterwiseColFastFarm(df.columns.values, nWT=nWT) + dfDiam = fastlib.extract_spanwise_data(ColsInfo, nDMax, df=None, ts=dfAvg.iloc[0]) + #dfDiam = fastlib.insert_radial_columns(dfDiam) + if vD is None: + dfDiam.insert(0, 'i_[#]', np.arange(nDMax)+1) + else: + dfDiam.insert(0, 'x_[m]', vD[:nDMax]) + dfDiam['i/n_[-]'] = np.arange(nDMax)/nDMax + return dfRad, dfRadialTime, dfDiam + diff --git a/pydatview/fast/postpro.py b/pydatview/fast/postpro.py index 7b13e70..d27a397 100644 --- a/pydatview/fast/postpro.py +++ b/pydatview/fast/postpro.py @@ -1,1196 +1,1367 @@ -# --- For cmd.py -from __future__ import division, print_function -import os -import pandas as pd -import numpy as np -import re - -# --- fast libraries -from weio.weio.fast_input_file import FASTInputFile -from weio.weio.fast_output_file import FASTOutputFile -from weio.weio.fast_input_deck import FASTInputDeck -# from pyFAST.input_output.fast_input_file import FASTInputFile -# from pyFAST.input_output.fast_output_file import FASTOutputFile -# from pyFAST.input_output.fast_input_deck import FASTInputDeck - -# --------------------------------------------------------------------------------} -# --- Tools for IO -# --------------------------------------------------------------------------------{ -def ED_BldStations(ED): - """ Returns ElastoDyn Blade Station positions, useful to know where the outputs are. 
- INPUTS: - - ED: either: - - a filename of a ElastoDyn input file - - an instance of FileCl, as returned by reading the file, ED = weio.read(ED_filename) - - OUTUPTS: - - bld_fract: fraction of the blade length were stations are defined - - r_nodes: spanwise position from the rotor apex of the Blade stations - """ - if hasattr(ED,'startswith'): # if string - ED = FASTInputFile(ED) - - nBldNodes = ED['BldNodes'] - bld_fract = np.arange(1./nBldNodes/2., 1, 1./nBldNodes) - r_nodes = bld_fract*(ED['TipRad']-ED['HubRad']) + ED['HubRad'] - return bld_fract, r_nodes - -def ED_TwrStations(ED): - """ Returns ElastoDyn Tower Station positions, useful to know where the outputs are. - INPUTS: - - ED: either: - - a filename of a ElastoDyn input file - - an instance of FileCl, as returned by reading the file, ED = weio.read(ED_filename) - - OUTPUTS: - - r_fract: fraction of the towet length were stations are defined - - h_nodes: height from the *ground* of the stations (not from the Tower base) - """ - if hasattr(ED,'startswith'): # if string - ED = FASTInputFile(ED) - - nTwrNodes = ED['TwrNodes'] - twr_fract = np.arange(1./nTwrNodes/2., 1, 1./nTwrNodes) - h_nodes = twr_fract*(ED['TowerHt']-ED['TowerBsHt']) + ED['TowerBsHt'] - return twr_fract, h_nodes - -def ED_BldGag(ED): - """ Returns the radial position of ElastoDyn blade gages - INPUTS: - - ED: either: - - a filename of a ElastoDyn input file - - an instance of FileCl, as returned by reading the file, ED = weio.read(ED_filename) - OUTPUTS: - - r_gag: The radial positions of the gages, given from the rotor apex - """ - if hasattr(ED,'startswith'): # if string - ED = FASTInputFile(ED) - _,r_nodes= ED_BldStations(ED) - - # if ED.hasNodal: - # return r_nodes, None - nOuts = ED['NBlGages'] - if nOuts<=0: - return np.array([]), np.array([]) - if type(ED['BldGagNd']) is list: - Inodes = np.asarray(ED['BldGagNd']) - else: - Inodes = np.array([ED['BldGagNd']]) - r_gag = r_nodes[ Inodes[:nOuts] -1] - return r_gag, Inodes - -def ED_TwrGag(ED): - """ Returns the heights of ElastoDyn blade gages - INPUTS: - - ED: either: - - a filename of a ElastoDyn input file - - an instance of FileCl, as returned by reading the file, ED = weio.read(ED_filename) - OUTPUTS: - - h_gag: The heights of the gages, given from the ground height (tower base + TowerBsHt) - """ - if hasattr(ED,'startswith'): # if string - ED = FASTInputFile(ED) - _,h_nodes= ED_TwrStations(ED) - nOuts = ED['NTwGages'] - if nOuts<=0: - return np.array([]) - if type(ED['TwrGagNd']) is list: - Inodes = np.asarray(ED['TwrGagNd']) - else: - Inodes = np.array([ED['TwrGagNd']]) - h_gag = h_nodes[ Inodes[:nOuts] -1] - return h_gag - - -def AD14_BldGag(AD): - """ Returns the radial position of AeroDyn 14 blade gages (based on "print" in column 6) - INPUTS: - - AD: either: - - a filename of a AeroDyn input file - - an instance of FileCl, as returned by reading the file, AD = weio.read(AD_filename) - OUTPUTS: - - r_gag: The radial positions of the gages, given from the blade root - """ - if hasattr(ED,'startswith'): # if string - AD = FASTInputFile(AD) - - Nodes=AD['BldAeroNodes'] - if Nodes.shape[1]==6: - doPrint= np.array([ n.lower().find('p')==0 for n in Nodes[:,5]]) - else: - doPrint=np.array([ True for n in Nodes[:,0]]) - - r_gag = Nodes[doPrint,0].astype(float) - IR = np.arange(1,len(Nodes)+1)[doPrint] - return r_gag, IR - -def AD_BldGag(AD,AD_bld,chordOut=False): - """ Returns the radial position of AeroDyn blade gages - INPUTS: - - AD: either: - - a filename of a AeroDyn input file - - an instance of 
FileCl, as returned by reading the file, AD = weio.read(AD_filename) - - AD_bld: either: - - a filename of a AeroDyn Blade input file - - an instance of FileCl, as returned by reading the file, AD_bld = weio.read(AD_bld_filename) - OUTPUTS: - - r_gag: The radial positions of the gages, given from the blade root - """ - if hasattr(AD,'startswith'): # if string - AD = FASTInputFile(AD) - if hasattr(AD_bld,'startswith'): # if string - AD_bld = FASTInputFile(AD_bld) - #print(AD_bld.keys()) - - nOuts=AD['NBlOuts'] - if nOuts<=0: - if chordOut: - return np.array([]), np.array([]) - else: - return np.array([]) - INodes = np.array(AD['BlOutNd'][:nOuts]) - r_gag = AD_bld['BldAeroNodes'][INodes-1,0] - if chordOut: - chord_gag = AD_bld['BldAeroNodes'][INodes-1,5] - return r_gag,chord_gag - else: - return r_gag - -def BD_BldGag(BD): - """ Returns the radial position of BeamDyn blade gages - INPUTS: - - BD: either: - - a filename of a BeamDyn input file - - an instance of FileCl, as returned by reading the file, BD = weio.read(BD_filename) - OUTPUTS: - - r_gag: The radial positions of the gages, given from the rotor apex - """ - if hasattr(BD,'startswith'): # if string - BD = FASTInputFile(BD) - - M = BD['MemberGeom'] - r_nodes = M[:,2] # NOTE: we select the z axis here, and we don't take curvilenear coord - nOuts = BD['NNodeOuts'] - if nOuts<=0: - nOuts=0 - if type(BD['OutNd']) is list: - Inodes = np.asarray(BD['OutNd']) - else: - Inodes = np.array([BD['OutNd']]) - r_gag = r_nodes[ Inodes[:nOuts] -1] - return r_gag, Inodes, r_nodes - -# -# -# 1, 7, 14, 21, 30, 36, 43, 52, 58 BldGagNd List of blade nodes that have strain gages [1 to BldNodes] (-) [unused if NBlGages=0] - -# --------------------------------------------------------------------------------} -# --- Helper functions for radial data -# --------------------------------------------------------------------------------{ -def _HarmonizeSpanwiseData(Name, Columns, vr, R, IR=None) : - """ helper function to use with spanwiseAD and spanwiseED """ - # --- Data present - data = [c for _,c in Columns if c is not None] - ColNames = [n for n,_ in Columns if n is not None] - Lengths = [len(d) for d in data] - if len(data)<=0: - print('[WARN] No spanwise data for '+Name) - return None, None, None - - # --- Harmonize data so that they all have the same length - nrMax = np.max(Lengths) - ids=np.arange(nrMax) - if vr is None: - bFakeVr=True - vr_bar = ids/(nrMax-1) - else: - vr_bar=vr/R - bFakeVr=False - if (nrMax)len(vr_bar): - raise Exception('Inconsitent length between radial stations and max index present in output chanels') - - for i in np.arange(len(data)): - d=data[i] - if len(d)len(vr_bar): - print(vr_bar) - raise Exception('Inconsitent length between radial stations ({:d}) and max index present in output chanels ({:d})'.format(len(vr_bar),nrMax)) - df.insert(0, 'r/R_[-]', vr_bar) - - if IR is not None: - df['Node_[#]']=IR[:nrMax] - df['i_[#]']=ids+1 - if vr is not None: - df['r_[m]'] = vr[:nrMax] - return df - -def find_matching_columns(Cols, PatternMap): - ColsInfo=[] - nrMax=0 - for colpattern,colmap in PatternMap.items(): - # Extracting columns matching pattern - cols, sIdx = find_matching_pattern(Cols, colpattern) - if len(cols)>0: - # Sorting by ID - cols = np.asarray(cols) - Idx = np.array([int(s) for s in sIdx]) - Isort = np.argsort(Idx) - Idx = Idx[Isort] - cols = cols[Isort] - col={'name':colmap,'Idx':Idx,'cols':cols} - nrMax=max(nrMax,np.max(Idx)) - ColsInfo.append(col) - return ColsInfo,nrMax - -def extract_spanwise_data(ColsInfo, nrMax, 
df=None,ts=None): - """ - Extract spanwise data based on some column info - ColsInfo: see find_matching_columns - """ - nCols = len(ColsInfo) - if nCols==0: - return None - if ts is not None: - Values = np.zeros((nrMax,nCols)) - Values[:] = np.nan - elif df is not None: - raise NotImplementedError() - - ColNames =[c['name'] for c in ColsInfo] - - for ic,c in enumerate(ColsInfo): - Idx, cols, colname = c['Idx'], c['cols'], c['name'] - for idx,col in zip(Idx,cols): - Values[idx-1,ic]=ts[col] - nMissing = np.sum(np.isnan(Values[:,ic])) - if len(cols)nrMax: - print('[WARN] More values found for {}, found {}/{}'.format(colname,len(cols),nrMax)) - df = pd.DataFrame(data=Values, columns=ColNames) - df = df.reindex(sorted(df.columns), axis=1) - return df - -def spanwiseColBD(Cols): - """ Return column info, available columns and indices that contain BD spanwise data""" - BDSpanMap=dict() - for sB in ['B1','B2','B3']: - BDSpanMap['^'+sB+r'N(\d)TDxr_\[m\]']=sB+'TDxr_[m]' - BDSpanMap['^'+sB+r'N(\d)TDyr_\[m\]']=sB+'TDyr_[m]' - BDSpanMap['^'+sB+r'N(\d)TDzr_\[m\]']=sB+'TDzr_[m]' - return find_matching_columns(Cols, BDSpanMap) - -def spanwiseColED(Cols): - """ Return column info, available columns and indices that contain ED spanwise data""" - EDSpanMap=dict() - # All Outs - for sB in ['B1','B2','B3']: - EDSpanMap['^[A]*'+sB+r'N(\d*)ALx_\[m/s^2\]' ] = sB+'ALx_[m/s^2]' - EDSpanMap['^[A]*'+sB+r'N(\d*)ALy_\[m/s^2\]' ] = sB+'ALy_[m/s^2]' - EDSpanMap['^[A]*'+sB+r'N(\d*)ALz_\[m/s^2\]' ] = sB+'ALz_[m/s^2]' - EDSpanMap['^[A]*'+sB+r'N(\d*)TDx_\[m\]' ] = sB+'TDx_[m]' - EDSpanMap['^[A]*'+sB+r'N(\d*)TDy_\[m\]' ] = sB+'TDy_[m]' - EDSpanMap['^[A]*'+sB+r'N(\d*)TDz_\[m\]' ] = sB+'TDz_[m]' - EDSpanMap['^[A]*'+sB+r'N(\d*)RDx_\[deg\]' ] = sB+'RDx_[deg]' - EDSpanMap['^[A]*'+sB+r'N(\d*)RDy_\[deg\]' ] = sB+'RDy_[deg]' - EDSpanMap['^[A]*'+sB+r'N(\d*)RDz_\[deg\]' ] = sB+'RDz_[deg]' - EDSpanMap['^[A]*'+sB+r'N(\d*)MLx_\[kN-m\]' ] = sB+'MLx_[kN-m]' - EDSpanMap['^[A]*'+sB+r'N(\d*)MLy_\[kN-m\]' ] = sB+'MLy_[kN-m]' - EDSpanMap['^[A]*'+sB+r'N(\d*)MLz_\[kN-m\]' ] = sB+'MLz_[kN-m]' - EDSpanMap['^[A]*'+sB+r'N(\d*)FLx_\[kN\]' ] = sB+'FLx_[kN]' - EDSpanMap['^[A]*'+sB+r'N(\d*)FLy_\[kN\]' ] = sB+'FLy_[kN]' - EDSpanMap['^[A]*'+sB+r'N(\d*)FLz_\[kN\]' ] = sB+'FLz_[kN]' - EDSpanMap['^[A]*'+sB+r'N(\d*)FLxNT_\[kN\]' ] = sB+'FLxNT_[kN]' - EDSpanMap['^[A]*'+sB+r'N(\d*)FLyNT_\[kN\]' ] = sB+'FLyNT_[kN]' - EDSpanMap['^[A]*'+sB+r'N(\d*)FlyNT_\[kN\]' ] = sB+'FLyNT_[kN]' # <<< Unfortunate - EDSpanMap['^[A]*'+sB+r'N(\d*)MLxNT_\[kN-m\]'] = sB+'MLxNT_[kN-m]' - EDSpanMap['^[A]*'+sB+r'N(\d*)MLyNT_\[kN-m\]'] = sB+'MLyNT_[kN-m]' - # Old - for sB in ['b1','b2','b3']: - SB=sB.upper() - EDSpanMap[r'^Spn(\d)ALx'+sB+r'_\[m/s^2\]']=SB+'ALx_[m/s^2]' - EDSpanMap[r'^Spn(\d)ALy'+sB+r'_\[m/s^2\]']=SB+'ALy_[m/s^2]' - EDSpanMap[r'^Spn(\d)ALz'+sB+r'_\[m/s^2\]']=SB+'ALz_[m/s^2]' - EDSpanMap[r'^Spn(\d)TDx'+sB+r'_\[m\]' ]=SB+'TDx_[m]' - EDSpanMap[r'^Spn(\d)TDy'+sB+r'_\[m\]' ]=SB+'TDy_[m]' - EDSpanMap[r'^Spn(\d)TDz'+sB+r'_\[m\]' ]=SB+'TDz_[m]' - EDSpanMap[r'^Spn(\d)RDx'+sB+r'_\[deg\]' ]=SB+'RDx_[deg]' - EDSpanMap[r'^Spn(\d)RDy'+sB+r'_\[deg\]' ]=SB+'RDy_[deg]' - EDSpanMap[r'^Spn(\d)RDz'+sB+r'_\[deg\]' ]=SB+'RDz_[deg]' - EDSpanMap[r'^Spn(\d)FLx'+sB+r'_\[kN\]' ]=SB+'FLx_[kN]' - EDSpanMap[r'^Spn(\d)FLy'+sB+r'_\[kN\]' ]=SB+'FLy_[kN]' - EDSpanMap[r'^Spn(\d)FLz'+sB+r'_\[kN\]' ]=SB+'FLz_[kN]' - EDSpanMap[r'^Spn(\d)MLy'+sB+r'_\[kN-m\]' ]=SB+'MLx_[kN-m]' - EDSpanMap[r'^Spn(\d)MLx'+sB+r'_\[kN-m\]' ]=SB+'MLy_[kN-m]' - EDSpanMap[r'^Spn(\d)MLz'+sB+r'_\[kN-m\]' ]=SB+'MLz_[kN-m]' - return 
find_matching_columns(Cols, EDSpanMap) - -def spanwiseColAD(Cols): - """ Return column info, available columns and indices that contain AD spanwise data""" - ADSpanMap=dict() - for sB in ['B1','B2','B3']: - ADSpanMap['^[A]*'+sB+r'N(\d*)Alpha_\[deg\]']=sB+'Alpha_[deg]' - ADSpanMap['^[A]*'+sB+r'N(\d*)AOA_\[deg\]' ]=sB+'Alpha_[deg]' # DBGOuts - ADSpanMap['^[A]*'+sB+r'N(\d*)AxInd_\[-\]' ]=sB+'AxInd_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)TnInd_\[-\]' ]=sB+'TnInd_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)AIn_\[deg\]' ]=sB+'AxInd_[-]' # DBGOuts NOTE BUG Unit - ADSpanMap['^[A]*'+sB+r'N(\d*)ApI_\[deg\]' ]=sB+'TnInd_[-]' # DBGOuts NOTE BUG Unit - ADSpanMap['^[A]*'+sB+r'N(\d*)AIn_\[-\]' ]=sB+'AxInd_[-]' # DBGOuts - ADSpanMap['^[A]*'+sB+r'N(\d*)ApI_\[-\]' ]=sB+'TnInd_[-]' # DBGOuts - ADSpanMap['^[A]*'+sB+r'N(\d*)Uin_\[m/s\]' ]=sB+'Uin_[m/s]' # DBGOuts - ADSpanMap['^[A]*'+sB+r'N(\d*)Uit_\[m/s\]' ]=sB+'Uit_[m/s]' # DBGOuts - ADSpanMap['^[A]*'+sB+r'N(\d*)Uir_\[m/s\]' ]=sB+'Uir_[m/s]' # DBGOuts - ADSpanMap['^[A]*'+sB+r'N(\d*)Cl_\[-\]' ]=sB+'Cl_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Cd_\[-\]' ]=sB+'Cd_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Cm_\[-\]' ]=sB+'Cm_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Cx_\[-\]' ]=sB+'Cx_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Cy_\[-\]' ]=sB+'Cy_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Cn_\[-\]' ]=sB+'Cn_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Ct_\[-\]' ]=sB+'Ct_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Re_\[-\]' ]=sB+'Re_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Vrel_\[m/s\]' ]=sB+'Vrel_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Theta_\[deg\]']=sB+'Theta_[deg]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Phi_\[deg\]' ]=sB+'Phi_[deg]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Twst_\[deg\]' ]=sB+'Twst_[deg]' #DBGOuts - ADSpanMap['^[A]*'+sB+r'N(\d*)Curve_\[deg\]']=sB+'Curve_[deg]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Vindx_\[m/s\]']=sB+'Vindx_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Vindy_\[m/s\]']=sB+'Vindy_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Fx_\[N/m\]' ]=sB+'Fx_[N/m]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Fy_\[N/m\]' ]=sB+'Fy_[N/m]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Fl_\[N/m\]' ]=sB+'Fl_[N/m]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Fd_\[N/m\]' ]=sB+'Fd_[N/m]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Fn_\[N/m\]' ]=sB+'Fn_[N/m]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Ft_\[N/m\]' ]=sB+'Ft_[N/m]' - ADSpanMap['^[A]*'+sB+r'N(\d*)VUndx_\[m/s\]']=sB+'VUndx_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)VUndy_\[m/s\]']=sB+'VUndy_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)VUndz_\[m/s\]']=sB+'VUndz_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)VDisx_\[m/s\]']=sB+'VDisx_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)VDisy_\[m/s\]']=sB+'VDisy_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)VDisz_\[m/s\]']=sB+'VDisz_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)STVx_\[m/s\]' ]=sB+'STVx_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)STVy_\[m/s\]' ]=sB+'STVy_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)STVz_\[m/s\]' ]=sB+'STVz_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Vx_\[m/s\]' ]=sB+'Vx_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Vy_\[m/s\]' ]=sB+'Vy_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Vz_\[m/s\]' ]=sB+'Vz_[m/s]' - ADSpanMap['^[A]*'+sB+r'N(\d*)DynP_\[Pa\]' ]=sB+'DynP_[Pa]' - ADSpanMap['^[A]*'+sB+r'N(\d*)M_\[-\]' ]=sB+'M_[-]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Mm_\[N-m/m\]' ]=sB+'Mm_[N-m/m]' - ADSpanMap['^[A]*'+sB+r'N(\d*)Gam_\[' ]=sB+'Gam_[m^2/s]' #DBGOuts - # --- AD 14 - ADSpanMap[r'^Alpha(\d*)_\[deg\]' ]='Alpha_[deg]' - ADSpanMap[r'^DynPres(\d*)_\[Pa\]' ]='DynPres_[Pa]' - ADSpanMap[r'^CLift(\d*)_\[-\]' ]='CLift_[-]' - ADSpanMap[r'^CDrag(\d*)_\[-\]' ]='CDrag_[-]' - ADSpanMap[r'^CNorm(\d*)_\[-\]' ]='CNorm_[-]' - ADSpanMap[r'^CTang(\d*)_\[-\]' ]='CTang_[-]' - 
ADSpanMap[r'^CMomt(\d*)_\[-\]' ]='CMomt_[-]' - ADSpanMap[r'^Pitch(\d*)_\[deg\]' ]='Pitch_[deg]' - ADSpanMap[r'^AxInd(\d*)_\[-\]' ]='AxInd_[-]' - ADSpanMap[r'^TanInd(\d*)_\[-\]' ]='TanInd_[-]' - ADSpanMap[r'^ForcN(\d*)_\[N\]' ]='ForcN_[N]' - ADSpanMap[r'^ForcT(\d*)_\[N\]' ]='ForcT_[N]' - ADSpanMap[r'^Pmomt(\d*)_\[N-m\]' ]='Pmomt_[N-N]' - ADSpanMap[r'^ReNum(\d*)_\[x10^6\]']='ReNum_[x10^6]' - ADSpanMap[r'^Gamma(\d*)_\[m^2/s\]']='Gamma_[m^2/s]' - - return find_matching_columns(Cols, ADSpanMap) - -def insert_extra_columns_AD(dfRad, tsAvg, vr=None, rho=None, R=None, nB=None, chord=None): - # --- Compute additional values (AD15 only) - if dfRad is None: - return None - if dfRad.shape[1]==0: - return dfRad - if chord is not None: - if vr is not None: - chord =chord[0:len(dfRad)] - for sB in ['B1','B2','B3']: - try: - vr_bar=vr/R - Fx = dfRad[sB+'Fx_[N/m]'] - U0 = tsAvg['Wind1VelX_[m/s]'] - Ct=nB*Fx/(0.5 * rho * 2 * U0**2 * np.pi * vr) - Ct[vr<0.01*R] = 0 - dfRad[sB+'Ctloc_[-]'] = Ct - CT=2*np.trapz(vr_bar*Ct,vr_bar) - dfRad[sB+'CtAvg_[-]']= CT*np.ones(vr.shape) - except: - pass - try: - dfRad[sB+'Gamma_[m^2/s]'] = 1/2 * chord* dfRad[sB+'Vrel_[m/s]'] * dfRad[sB+'Cl_[-]'] - except: - pass - try: - if not sB+'Vindx_[m/s]' in dfRad.columns: - dfRad[sB+'Vindx_[m/s]']= -dfRad[sB+'AxInd_[-]'].values * dfRad[sB+'Vx_[m/s]'].values - dfRad[sB+'Vindy_[m/s]']= dfRad[sB+'TnInd_[-]'].values * dfRad[sB+'Vy_[m/s]'].values - except: - pass - return dfRad - - - -def spanwisePostPro(FST_In=None,avgMethod='constantwindow',avgParam=5,out_ext='.outb',df=None): - """ - Postprocess FAST radial data - - INPUTS: - - FST_IN: Fast .fst input file - - avgMethod='periods', avgParam=2: average over 2 last periods, Needs Azimuth sensors!!! - - avgMethod='constantwindow', avgParam=5: average over 5s of simulation - - postprofile: outputfile to write radial data - """ - # --- Opens Fast output and performs averaging - if df is None: - df = FASTOutputFile(FST_In.replace('.fst',out_ext).replace('.dvr',out_ext)).toDataFrame() - returnDF=True - else: - returnDF=False - # NOTE: spanwise script doest not support duplicate columns - df = df.loc[:,~df.columns.duplicated()] - dfAvg = averageDF(df,avgMethod=avgMethod ,avgParam=avgParam) # NOTE: average 5 last seconds - - # --- Extract info (e.g. 
radial positions) from Fast input file - # We don't have a .fst input file, so we'll rely on some default values for "r" - rho = 1.225 - chord = None - # --- Extract radial positions of output channels - r_AD, r_ED, r_BD, IR_AD, IR_ED, IR_BD, R, r_hub, fst = FASTRadialOutputs(FST_In, OutputCols=df.columns.values) - if R is None: - R=1 - try: - chord = fst.AD.Bld1['BldAeroNodes'][:,5] # Full span - except: - pass - try: - rho = fst.AD['Rho'] - except: - try: - rho = fst.AD['AirDens'] - except: - pass - #print('r_AD:', r_AD) - #print('r_ED:', r_ED) - #print('r_BD:', r_BD) - #print('I_AD:', IR_AD) - #print('I_ED:', IR_ED) - #print('I_BD:', IR_BD) - # --- Extract radial data and export to csv if needed - dfRad_AD = None - dfRad_ED = None - dfRad_BD = None - Cols=dfAvg.columns.values - # --- AD - ColsInfoAD, nrMaxAD = spanwiseColAD(Cols) - dfRad_AD = extract_spanwise_data(ColsInfoAD, nrMaxAD, df=None, ts=dfAvg.iloc[0]) - dfRad_AD = insert_extra_columns_AD(dfRad_AD, dfAvg.iloc[0], vr=r_AD, rho=rho, R=R, nB=3, chord=chord) - dfRad_AD = insert_radial_columns(dfRad_AD, r_AD, R=R, IR=IR_AD) - # --- ED - ColsInfoED, nrMaxED = spanwiseColED(Cols) - dfRad_ED = extract_spanwise_data(ColsInfoED, nrMaxED, df=None, ts=dfAvg.iloc[0]) - dfRad_ED = insert_radial_columns(dfRad_ED, r_ED, R=R, IR=IR_ED) - # --- BD - ColsInfoBD, nrMaxBD = spanwiseColBD(Cols) - dfRad_BD = extract_spanwise_data(ColsInfoBD, nrMaxBD, df=None, ts=dfAvg.iloc[0]) - dfRad_BD = insert_radial_columns(dfRad_BD, r_BD, R=R, IR=IR_BD) - if returnDF: - return dfRad_ED , dfRad_AD, dfRad_BD, df - else: - return dfRad_ED , dfRad_AD, dfRad_BD - - - -def spanwisePostProRows(df, FST_In=None): - """ - Returns a 3D matrix: n x nSpan x nColumn where df is of size n x nColumn - - NOTE: this is really not optimal. Spanwise columns should be extracted only once.. - """ - # --- Extract info (e.g. 
radial positions) from Fast input file - # We don't have a .fst input file, so we'll rely on some default values for "r" - rho = 1.225 - chord = None - # --- Extract radial positions of output channels - r_AD, r_ED, r_BD, IR_AD, IR_ED, IR_BD, R, r_hub, fst = FASTRadialOutputs(FST_In, OutputCols=df.columns.values) - #print('r_AD:', r_AD) - #print('r_ED:', r_ED) - #print('r_BD:', r_BD) - if R is None: - R=1 - try: - chord = fst.AD.Bld1['BldAeroNodes'][:,5] # Full span - except: - pass - try: - rho = fst.AD['Rho'] - except: - try: - rho = fst.AD['AirDens'] - except: - pass - # --- Extract radial data for each azimuthal average - M_AD=None - M_ED=None - M_BD=None - Col_AD=None - Col_ED=None - Col_BD=None - v = df.index.values - - # --- Getting Column info - Cols=df.columns.values - if r_AD is not None: - ColsInfoAD, nrMaxAD = spanwiseColAD(Cols) - if r_ED is not None: - ColsInfoED, nrMaxED = spanwiseColED(Cols) - if r_BD is not None: - ColsInfoBD, nrMaxBD = spanwiseColBD(Cols) - for i,val in enumerate(v): - if r_AD is not None: - dfRad_AD = extract_spanwise_data(ColsInfoAD, nrMaxAD, df=None, ts=df.iloc[i]) - dfRad_AD = insert_extra_columns_AD(dfRad_AD, df.iloc[i], vr=r_AD, rho=rho, R=R, nB=3, chord=chord) - dfRad_AD = insert_radial_columns(dfRad_AD, r_AD, R=R, IR=IR_AD) - if i==0: - M_AD = np.zeros((len(v), len(dfRad_AD), len(dfRad_AD.columns))) - Col_AD=dfRad_AD.columns.values - M_AD[i, :, : ] = dfRad_AD.values - if r_ED is not None and len(r_ED)>0: - dfRad_ED = extract_spanwise_data(ColsInfoED, nrMaxED, df=None, ts=df.iloc[i]) - dfRad_ED = insert_radial_columns(dfRad_ED, r_ED, R=R, IR=IR_ED) - if i==0: - M_ED = np.zeros((len(v), len(dfRad_ED), len(dfRad_ED.columns))) - Col_ED=dfRad_ED.columns.values - M_ED[i, :, : ] = dfRad_ED.values - if r_BD is not None and len(r_BD)>0: - dfRad_BD = extract_spanwise_data(ColsInfoBD, nrMaxBD, df=None, ts=df.iloc[i]) - dfRad_BD = insert_radial_columns(dfRad_BD, r_BD, R=R, IR=IR_BD) - if i==0: - M_BD = np.zeros((len(v), len(dfRad_BD), len(dfRad_BD.columns))) - Col_BD=dfRad_BD.columns.values - M_BD[i, :, : ] = dfRad_BD.values - return M_AD, Col_AD, M_ED, Col_ED, M_BD, Col_BD - - -def FASTRadialOutputs(FST_In, OutputCols=None): - """ Returns radial positions where FAST has outputs - INPUTS: - FST_In: fast input file (.fst) - OUTPUTS: - r_AD: radial positions of FAST Outputs from the rotor center - """ - R = None - r_hub =0 - r_AD = None - r_ED = None - r_BD = None - IR_ED = None - IR_AD = None - IR_BD = None - fst=None - if FST_In is not None: - fst = FASTInputDeck(FST_In, readlist=['AD','ADbld','ED','BD']) - # NOTE: all this below should be in FASTInputDeck - if fst.version == 'F7': - # --- FAST7 - if not hasattr(fst,'AD'): - raise Exception('The AeroDyn file couldn''t be found or read, from main file: '+FST_In) - r_AD,IR_AD = AD14_BldGag(fst.AD) - R = fst.fst['TipRad'] - try: - rho = fst.AD['Rho'] - except: - rho = fst.AD['AirDens'] - else: - # --- OpenFAST 2 - R = None - - # --- ElastoDyn - if 'NumTurbines' in fst.fst.keys(): - # AeroDyn driver... 
- if 'HubRad(1)' in fst.fst.keys(): - r_hub = fst.fst['HubRad(1)'] - else: - r_hub = fst.fst['BldHubRad_bl(1_1)'] - - elif not hasattr(fst,'ED'): - print('[WARN] The Elastodyn file couldn''t be found or read, from main file: '+FST_In) - #raise Exception('The Elastodyn file couldn''t be found or read, from main file: '+FST_In) - else: - R = fst.ED['TipRad'] - r_hub = fst.ED['HubRad'] - if fst.ED.hasNodal: - _, r_ED = ED_BldStations(fst.ED) - IR_ED =None - else: - r_ED, IR_ED = ED_BldGag(fst.ED) - - # --- BeamDyn - if fst.BD is not None: - r_BD, IR_BD, r_BD_All = BD_BldGag(fst.BD) - r_BD= r_BD+r_hub - if R is None: - R = r_BD_All[-1] # just in case ED file missing - - # --- AeroDyn - if fst.AD is None: - print('[WARN] The AeroDyn file couldn''t be found or read, from main file: '+FST_In) - #raise Exception('The AeroDyn file couldn''t be found or read, from main file: '+FST_In) - else: - if fst.ADversion == 'AD15': - if fst.AD.Bld1 is None: - raise Exception('The AeroDyn blade file couldn''t be found or read, from main file: '+FST_In) - - if 'B1N001Cl_[-]' in OutputCols or np.any(np.char.find(list(OutputCols),'AB1N')==0): - # This was compiled with all outs - r_AD = fst.AD.Bld1['BldAeroNodes'][:,0] # Full span - r_AD += r_hub - IR_AD = None - else: - r_AD,_ = AD_BldGag(fst.AD,fst.AD.Bld1, chordOut = True) # Only at Gages locations - r_AD += r_hub - - if R is None: - # ElastoDyn was not read, we use R from AD - R = fst.AD.Bld1['BldAeroNodes'][-1,0] - - elif fst.ADversion == 'AD14': - r_AD,IR_AD = AD14_BldGag(fst.AD) - - else: - raise Exception('AeroDyn version unknown') - return r_AD, r_ED, r_BD, IR_AD, IR_ED, IR_BD, R, r_hub, fst - - - -def addToOutlist(OutList, Signals): - if not isinstance(Signals,list): - raise Exception('Signals must be a list') - for s in Signals: - ss=s.split()[0].strip().strip('"').strip('\'') - AlreadyIn = any([o.find(ss)==1 for o in OutList ]) - if not AlreadyIn: - OutList.append(s) - return OutList - - - -# --------------------------------------------------------------------------------} -# --- Generic df -# --------------------------------------------------------------------------------{ -def remap_df(df, ColMap, bColKeepNewOnly=False, inPlace=False, dataDict=None, verbose=False): - """ Add/rename columns of a dataframe, potentially perform operations between columns - - dataDict: dicitonary of data to be made available as "variable" in the column mapping - - Example: - - ColumnMap={ - 'WS_[m/s]' : '{Wind1VelX_[m/s]}' , # create a new column from existing one - 'RtTSR_[-]' : '{RtTSR_[-]} * 2 + {RtAeroCt_[-]}' , # change value of column - 'RotSpeed_[rad/s]' : '{RotSpeed_[rpm]} * 2*np.pi/60 ', # new column [rpm] -> [rad/s] - } - # Read - df = weio.read('FASTOutBin.outb').toDataFrame() - # Change columns based on formulae, potentially adding new columns - df = fastlib.remap_df(df, ColumnMap, inplace=True) - - """ - # Insert dataDict into namespace - if dataDict is not None: - for k,v in dataDict.items(): - exec('{:s} = dataDict["{:s}"]'.format(k,k)) - - - if not inPlace: - df=df.copy() - ColMapMiss=[] - ColNew=[] - RenameMap=dict() - # Loop for expressions - for k0,v in ColMap.items(): - k=k0.strip() - v=v.strip() - if v.find('{')>=0: - search_results = re.finditer(r'\{.*?\}', v) - expr=v - if verbose: - print('Attempt to insert column {:15s} with expr {}'.format(k,v)) - # For more advanced operations, we use an eval - bFail=False - for item in search_results: - col=item.group(0)[1:-1] - if col not in df.columns: - ColMapMiss.append(col) - bFail=True - 
expr=expr.replace(item.group(0),'df[\''+col+'\']') - #print(k0, '=', expr) - if not bFail: - df[k]=eval(expr) - ColNew.append(k) - else: - print('[WARN] Column not present in dataframe, cannot evaluate: ',expr) - else: - #print(k0,'=',v) - if v not in df.columns: - ColMapMiss.append(v) - print('[WARN] Column not present in dataframe: ',v) - else: - RenameMap[k]=v - - # Applying renaming only now so that expressions may be applied in any order - for k,v in RenameMap.items(): - if verbose: - print('Renaming column {:15s} > {}'.format(v,k)) - k=k.strip() - iCol = list(df.columns).index(v) - df.columns.values[iCol]=k - ColNew.append(k) - df.columns = df.columns.values # Hack to ensure columns are updated - - if len(ColMapMiss)>0: - print('[FAIL] The following columns were not found in the dataframe:',ColMapMiss) - #print('Available columns are:',df.columns.values) - - if bColKeepNewOnly: - ColNew = [c for c,_ in ColMap.items() if c in ColNew]# Making sure we respec order from user - ColKeepSafe = [c for c in ColNew if c in df.columns.values] - ColKeepMiss = [c for c in ColNew if c not in df.columns.values] - if len(ColKeepMiss)>0: - print('[WARN] Signals missing and omitted for ColKeep:\n '+'\n '.join(ColKeepMiss)) - df=df[ColKeepSafe] - return df - - -# --------------------------------------------------------------------------------} -# --- Tools for PostProcessing one or several simulations -# --------------------------------------------------------------------------------{ -def _zero_crossings(y,x=None,direction=None): - """ - Find zero-crossing points in a discrete vector, using linear interpolation. - direction: 'up' or 'down', to select only up-crossings or down-crossings - Returns: - x values xzc such that y(yzc)==0 - indexes izc, such that the zero is between y[izc] (excluded) and y[izc+1] (included) - if direction is not provided, also returns: - sign, equal to 1 for up crossing - """ - y=np.asarray(y) - if x is None: - x=np.arange(len(y)) - - if np.any((x[1:] - x[0:-1]) <= 0.0): - raise Exception('x values need to be in ascending order') - - # Indices before zero-crossing - iBef = np.where(y[1:]*y[0:-1] < 0.0)[0] - - # Find the zero crossing by linear interpolation - xzc = x[iBef] - y[iBef] * (x[iBef+1] - x[iBef]) / (y[iBef+1] - y[iBef]) - - # Selecting points that are exactly 0 and where neighbor change sign - iZero = np.where(y == 0.0)[0] - iZero = iZero[np.where((iZero > 0) & (iZero < x.size-1))] - iZero = iZero[np.where(y[iZero-1]*y[iZero+1] < 0.0)] - - # Concatenate - xzc = np.concatenate((xzc, x[iZero])) - iBef = np.concatenate((iBef, iZero)) - - # Sort - iSort = np.argsort(xzc) - xzc, iBef = xzc[iSort], iBef[iSort] - - # Return up-crossing, down crossing or both - sign = np.sign(y[iBef+1]-y[iBef]) - if direction == 'up': - I= np.where(sign==1)[0] - return xzc[I],iBef[I] - elif direction == 'down': - I= np.where(sign==-1)[0] - return xzc[I],iBef[I] - elif direction is not None: - raise Exception('Direction should be either `up` or `down`') - return xzc, iBef, sign - -def find_matching_pattern(List, pattern): - """ Return elements of a list of strings that match a pattern - and return the first matching group - """ - reg_pattern=re.compile(pattern) - MatchedElements=[] - MatchedStrings=[] - for l in List: - match=reg_pattern.search(l) - if match: - MatchedElements.append(l) - if len(match.groups(1))>0: - MatchedStrings.append(match.groups(1)[0]) - else: - MatchedStrings.append('') - return MatchedElements, MatchedStrings - - - -def extractSpanTSReg(ts, col_pattern, colname, 
IR=None): - r""" Helper function to extract spanwise results, like B1N1Cl B1N2Cl etc. - - Example - col_pattern: r'B1N(\d*)Cl_\[-\]' - colname : r'B1Cl_[-]' - """ - # Extracting columns matching pattern - cols, sIdx = find_matching_pattern(ts.keys(), col_pattern) - if len(cols) ==0: - return (None,None) - - # Sorting by ID - cols = np.asarray(cols) - Idx = np.array([int(s) for s in sIdx]) - Isort = np.argsort(Idx) - Idx = Idx[Isort] - cols = cols[Isort] - - nrMax = np.max(Idx) - Values = np.zeros((nrMax,1)) - Values[:] = np.nan -# if IR is None: -# cols = [col_pattern.format(ir+1) for ir in range(nr)] -# else: -# cols = [col_pattern.format(ir) for ir in IR] - for idx,col in zip(Idx,cols): - Values[idx-1]=ts[col] - nMissing = np.sum(np.isnan(Values)) - if nMissing==nrMax: - return (None,None) - if len(cols)nrMax: - print('[WARN] More values found for {}, found {}/{}'.format(colname,len(cols),nrMax)) - return (colname,Values) - -def extractSpanTS(ts, nr, col_pattern, colname, IR=None): - """ Helper function to extract spanwise results, like B1N1Cl B1N2Cl etc. - - Example - col_pattern: 'B1N{:d}Cl_[-]' - colname : 'B1Cl_[-]' - """ - Values=np.zeros((nr,1)) - if IR is None: - cols = [col_pattern.format(ir+1) for ir in range(nr)] - else: - cols = [col_pattern.format(ir) for ir in IR] - colsExist = [c for c in cols if c in ts.keys() ] - if len(colsExist)==0: - return (None,None) - - Values = [ts[c] if c in ts.keys() else np.nan for c in cols ] - nMissing = np.sum(np.isnan(Values)) - #Values = ts[cols].T - #nCoun=len(Values) - if nMissing==nr: - return (None,None) - if len(colsExist)nr: - print('[WARN] More values found for {}, found {}/{}'.format(colname,len(cols),nr)) - return (colname,Values) - -def radialInterpTS(df, r, varName, r_ref, blade=1, bldFmt='AB{:d}', ndFmt='N{:03d}', method='interp'): - """ - Interpolate a time series at a given radial position for a given variable (varName) - INPUTS: - - df : a dataframe (typically with OpenFAST time series) - - r : radial positions of node where data is to be interpolated - - varName: variable name (and unit) to be interpolated. - The dataframe column will be assumed to be "BldFmt"+"ndFmt"+varName - - r_ref : radial position of nodal data present in the dataframe - - bldFmt : format for blade number, e.g. 'B{:d}' or 'AB{:d}' - - ndFmt : format for node number, e.g. 'N{:d}' or 'N{:03d}' - OUTPUT: - - interpolated time series - """ - # --- Sanity checks - r_ref = np.asarray(r_ref) - if not np.all(r_ref[:-1] <= r_ref[1:]): - raise Exception('This function only works for ascending radial values') - - # No extrapolation - if rnp.max(r_ref): - raise Exception('Extrapolation not supported') - - # Exactly on first or last nodes - if r==r_ref[0]: - col=bldFmt.format(blade) + ndFmt.format(1) + varName - if col in df.columns.values: - return df[col] - else: - raise Exception('Column {} not found in dataframe'.format(col)) - elif r==r_ref[-1]: - col=bldFmt.format(blade) + ndFmt.format(len(r_ref)+1) + varName - if col in df.columns.values: - return df[col] - else: - raise Exception('Column {} not found in dataframe'.format(col)) - - if method=='interp': - # Interpolation - iBef = np.where(r_reftStart].copy() - - dfPsi= bin_mean_DF(df, psiBin, colPsi) - if np.any(dfPsi['Counts']<1): - print('[WARN] some bins have no data! 
Increase the bin size.') - - return dfPsi - - -def averageDF(df,avgMethod='periods',avgParam=None,ColMap=None,ColKeep=None,ColSort=None,stats=['mean']): - """ - See average PostPro for documentation, same interface, just does it for one dataframe - """ - def renameCol(x): - for k,v in ColMap.items(): - if x==v: - return k - return x - # Before doing the colomn map we store the time - time = df['Time_[s]'].values - timenoNA = time[~np.isnan(time)] - # Column mapping - if ColMap is not None: - ColMapMiss = [v for _,v in ColMap.items() if v not in df.columns.values] - if len(ColMapMiss)>0: - print('[WARN] Signals missing and omitted for ColMap:\n '+'\n '.join(ColMapMiss)) - df.rename(columns=renameCol,inplace=True) - ## Defining a window for stats (start time and end time) - if avgMethod.lower()=='constantwindow': - tEnd = timenoNA[-1] - if avgParam is None: - tStart=timenoNA[0] - else: - tStart =tEnd-avgParam - elif avgMethod.lower()=='periods': - # --- Using azimuth to find periods - if 'Azimuth_[deg]' not in df.columns: - raise Exception('The sensor `Azimuth_[deg]` does not appear to be in the output file. You cannot use the averaging method by `periods`, use `constantwindow` instead.') - # NOTE: potentially we could average over each period and then average - psi=df['Azimuth_[deg]'].values - _,iBef = _zero_crossings(psi-psi[-10],direction='up') - if len(iBef)==0: - _,iBef = _zero_crossings(psi-180,direction='up') - if len(iBef)==0: - print('[WARN] Not able to find a zero crossing!') - tEnd = time[-1] - iBef=[0] - else: - tEnd = time[iBef[-1]] - - if avgParam is None: - tStart=time[iBef[0]] - else: - avgParam=int(avgParam) - if len(iBef)-10: - print('[WARN] Signals missing and omitted for ColKeep:\n '+'\n '.join(ColKeepMiss)) - df=df[ColKeepSafe] - if tStart=tStart) & (time<=tEnd) & (~np.isnan(time)))[0] - iEnd = IWindow[-1] - iStart = IWindow[0] - ## Absolute and relative differences at window extremities - DeltaValuesAbs=(df.iloc[iEnd]-df.iloc[iStart]).abs() -# DeltaValuesRel=(df.iloc[iEnd]-df.iloc[iStart]).abs()/df.iloc[iEnd] - DeltaValuesRel=(df.iloc[IWindow].max()-df.iloc[IWindow].min())/df.iloc[IWindow].mean() - #EndValues=df.iloc[iEnd] - #if avgMethod.lower()=='periods_omega': - # if DeltaValuesRel['RotSpeed_[rpm]']*100>5: - # print('[WARN] Rotational speed vary more than 5% in averaging window ({}%) for simulation: {}'.format(DeltaValuesRel['RotSpeed_[rpm]']*100,f)) - ## Stats values during window - # MeanValues = df[IWindow].mean() - # StdValues = df[IWindow].std() - if 'mean' in stats: - MeanValues = pd.DataFrame(df.iloc[IWindow].mean()).transpose() - else: - raise NotImplementedError() - return MeanValues - - - -def averagePostPro(outFiles,avgMethod='periods',avgParam=None,ColMap=None,ColKeep=None,ColSort=None,stats=['mean']): - """ Opens a list of FAST output files, perform average of its signals and return a panda dataframe - For now, the scripts only computes the mean within a time window which may be a constant or a time that is a function of the rotational speed (see `avgMethod`). - The script only computes the mean for now. Other stats will be added - - `ColMap` : dictionary where the key is the new column name, and v the old column name. - Default: None, output is not sorted - NOTE: the mapping is done before sorting and `ColKeep` is applied - ColMap = {'WS':Wind1VelX_[m/s], 'RPM': 'RotSpeed_[rpm]'} - `ColKeep` : List of strings corresponding to the signals to analyse. 
- Default: None, all columns are analysed - Example: ColKeep=['RotSpeed_[rpm]','BldPitch1_[deg]','RtAeroCp_[-]'] - or: ColKeep=list(ColMap.keys()) - `avgMethod` : string defining the method used to determine the extent of the averaging window: - - 'periods': use a number of periods(`avgParam`), determined by the azimuth. - - 'periods_omega': use a number of periods(`avgParam`), determined by the mean RPM - - 'constantwindow': the averaging window is constant (defined by `avgParam`). - `avgParam`: based on `avgMethod` it is either - - for 'periods_*': the number of revolutions for the window. - Default: None, as many period as possible are used - - for 'constantwindow': the number of seconds for the window - Default: None, full simulation length is used - """ - result=None - invalidFiles =[] - # Loop trough files and populate result - for i,f in enumerate(outFiles): - try: - df=FASTOutputFile(f).toDataFrame() - except: - invalidFiles.append(f) - continue - postpro=averageDF(df, avgMethod=avgMethod, avgParam=avgParam, ColMap=ColMap, ColKeep=ColKeep,ColSort=ColSort,stats=stats) - MeanValues=postpro # todo - if result is None: - # We create a dataframe here, now that we know the colums - columns = MeanValues.columns - result = pd.DataFrame(np.nan, index=np.arange(len(outFiles)), columns=columns) - result.iloc[i,:] = MeanValues.copy().values - - if ColSort is not None: - # Sorting - result.sort_values([ColSort],inplace=True,ascending=True) - result.reset_index(drop=True,inplace=True) - - if len(invalidFiles)==len(outFiles): - raise Exception('None of the files can be read (or exist)!') - elif len(invalidFiles)>0: - print('[WARN] There were {} missing/invalid files: {}'.format(len(invalidFiles),invalidFiles)) - - - return result - - -if __name__ == '__main__': - main() +# --- For cmd.py +from __future__ import division, print_function +import os +import pandas as pd +import numpy as np +import re + +# --- fast libraries +from weio.weio.fast_input_file import FASTInputFile +from weio.weio.fast_output_file import FASTOutputFile +from weio.weio.fast_input_deck import FASTInputDeck +# from pyFAST.input_output.fast_input_file import FASTInputFile +# from pyFAST.input_output.fast_output_file import FASTOutputFile +# from pyFAST.input_output.fast_input_deck import FASTInputDeck + +# --------------------------------------------------------------------------------} +# --- Tools for IO +# --------------------------------------------------------------------------------{ +def ED_BldStations(ED): + """ Returns ElastoDyn Blade Station positions, useful to know where the outputs are. + INPUTS: + - ED: either: + - a filename of a ElastoDyn input file + - an instance of FileCl, as returned by reading the file, ED = weio.read(ED_filename) + + OUTUPTS: + - bld_fract: fraction of the blade length were stations are defined + - r_nodes: spanwise position from the rotor apex of the Blade stations + """ + if hasattr(ED,'startswith'): # if string + ED = FASTInputFile(ED) + + nBldNodes = ED['BldNodes'] + bld_fract = np.arange(1./nBldNodes/2., 1, 1./nBldNodes) + r_nodes = bld_fract*(ED['TipRad']-ED['HubRad']) + ED['HubRad'] + return bld_fract, r_nodes + +def ED_TwrStations(ED): + """ Returns ElastoDyn Tower Station positions, useful to know where the outputs are. 
+ INPUTS: + - ED: either: + - a filename of a ElastoDyn input file + - an instance of FileCl, as returned by reading the file, ED = weio.read(ED_filename) + + OUTPUTS: + - r_fract: fraction of the towet length were stations are defined + - h_nodes: height from the *ground* of the stations (not from the Tower base) + """ + if hasattr(ED,'startswith'): # if string + ED = FASTInputFile(ED) + + nTwrNodes = ED['TwrNodes'] + twr_fract = np.arange(1./nTwrNodes/2., 1, 1./nTwrNodes) + h_nodes = twr_fract*(ED['TowerHt']-ED['TowerBsHt']) + ED['TowerBsHt'] + return twr_fract, h_nodes + +def ED_BldGag(ED): + """ Returns the radial position of ElastoDyn blade gages + INPUTS: + - ED: either: + - a filename of a ElastoDyn input file + - an instance of FileCl, as returned by reading the file, ED = weio.read(ED_filename) + OUTPUTS: + - r_gag: The radial positions of the gages, given from the rotor apex + """ + if hasattr(ED,'startswith'): # if string + ED = FASTInputFile(ED) + _,r_nodes= ED_BldStations(ED) + + # if ED.hasNodal: + # return r_nodes, None + nOuts = ED['NBlGages'] + if nOuts<=0: + return np.array([]), np.array([]) + if type(ED['BldGagNd']) is list: + Inodes = np.asarray(ED['BldGagNd']) + else: + Inodes = np.array([ED['BldGagNd']]) + r_gag = r_nodes[ Inodes[:nOuts] -1] + return r_gag, Inodes + +def ED_TwrGag(ED): + """ Returns the heights of ElastoDyn blade gages + INPUTS: + - ED: either: + - a filename of a ElastoDyn input file + - an instance of FileCl, as returned by reading the file, ED = weio.read(ED_filename) + OUTPUTS: + - h_gag: The heights of the gages, given from the ground height (tower base + TowerBsHt) + """ + if hasattr(ED,'startswith'): # if string + ED = FASTInputFile(ED) + _,h_nodes= ED_TwrStations(ED) + nOuts = ED['NTwGages'] + if nOuts<=0: + return np.array([]) + if type(ED['TwrGagNd']) is list: + Inodes = np.asarray(ED['TwrGagNd']) + else: + Inodes = np.array([ED['TwrGagNd']]) + h_gag = h_nodes[ Inodes[:nOuts] -1] + return h_gag + + +def AD14_BldGag(AD): + """ Returns the radial position of AeroDyn 14 blade gages (based on "print" in column 6) + INPUTS: + - AD: either: + - a filename of a AeroDyn input file + - an instance of FileCl, as returned by reading the file, AD = weio.read(AD_filename) + OUTPUTS: + - r_gag: The radial positions of the gages, given from the blade root + """ + if hasattr(ED,'startswith'): # if string + AD = FASTInputFile(AD) + + Nodes=AD['BldAeroNodes'] + if Nodes.shape[1]==6: + doPrint= np.array([ n.lower().find('p')==0 for n in Nodes[:,5]]) + else: + doPrint=np.array([ True for n in Nodes[:,0]]) + + r_gag = Nodes[doPrint,0].astype(float) + IR = np.arange(1,len(Nodes)+1)[doPrint] + return r_gag, IR + +def AD_BldGag(AD,AD_bld,chordOut=False): + """ Returns the radial position of AeroDyn blade gages + INPUTS: + - AD: either: + - a filename of a AeroDyn input file + - an instance of FileCl, as returned by reading the file, AD = weio.read(AD_filename) + - AD_bld: either: + - a filename of a AeroDyn Blade input file + - an instance of FileCl, as returned by reading the file, AD_bld = weio.read(AD_bld_filename) + OUTPUTS: + - r_gag: The radial positions of the gages, given from the blade root + """ + if hasattr(AD,'startswith'): # if string + AD = FASTInputFile(AD) + if hasattr(AD_bld,'startswith'): # if string + AD_bld = FASTInputFile(AD_bld) + #print(AD_bld.keys()) + + nOuts=AD['NBlOuts'] + if nOuts<=0: + if chordOut: + return np.array([]), np.array([]) + else: + return np.array([]) + INodes = np.array(AD['BlOutNd'][:nOuts]) + r_gag = 
AD_bld['BldAeroNodes'][INodes-1,0] + if chordOut: + chord_gag = AD_bld['BldAeroNodes'][INodes-1,5] + return r_gag,chord_gag + else: + return r_gag + +def BD_BldStations(BD, BDBld): + """ Returns BeamDyn Blade Station positions, useful to know where the outputs are. + INPUTS: + - BD: either: + - a filename of a ElastoDyn input file + - an instance of FileCl, as returned by reading the file, BD = weio.read(BD_filename) + OUTUPTS: + - r_nodes: spanwise position from the balde root of the Blade stations + """ + if hasattr(BD,'startswith'): # if string + BD = FASTInputFile(BD) + if hasattr(BD,'startswith'): # if string + BDBld = FASTInputFile(BDBld) + # BD['BldFile'].replace('"','')) + + z_kp = BD['MemberGeom'][:,2] + R = z_kp[-1]-z_kp[0] + r = BDBld['BeamProperties']['span']*R + quad = BD['quadrature'] + ref = BD['refine'] + if 'default' in str(ref).lower(): + ref = 1 + dr = np.diff(r)/ref + rmid = np.concatenate( [r[:-1]+dr*(iref+1) for iref in np.arange(ref-1) ]) + r = np.concatenate( (r, rmid)) + r = np.unique(np.sort(r)) + return r + +def BD_BldGag(BD): + """ Returns the radial position of BeamDyn blade gages + INPUTS: + - BD: either: + - a filename of a BeamDyn input file + - an instance of FileCl, as returned by reading the file, BD = weio.read(BD_filename) + OUTPUTS: + - r_gag: The radial positions of the gages, given from the rotor apex + """ + if hasattr(BD,'startswith'): # if string + BD = FASTInputFile(BD) + + M = BD['MemberGeom'] + r_nodes = M[:,2] # NOTE: we select the z axis here, and we don't take curvilenear coord + nOuts = BD['NNodeOuts'] + if nOuts<=0: + nOuts=0 + if type(BD['OutNd']) is list: + Inodes = np.asarray(BD['OutNd']) + else: + Inodes = np.array([BD['OutNd']]) + r_gag = r_nodes[ Inodes[:nOuts] -1] + return r_gag, Inodes, r_nodes + +# +# +# 1, 7, 14, 21, 30, 36, 43, 52, 58 BldGagNd List of blade nodes that have strain gages [1 to BldNodes] (-) [unused if NBlGages=0] + +# --------------------------------------------------------------------------------} +# --- Helper functions for radial data +# --------------------------------------------------------------------------------{ +def _HarmonizeSpanwiseData(Name, Columns, vr, R, IR=None) : + """ helper function to use with spanwiseAD and spanwiseED """ + # --- Data present + data = [c for _,c in Columns if c is not None] + ColNames = [n for n,_ in Columns if n is not None] + Lengths = [len(d) for d in data] + if len(data)<=0: + print('[WARN] No spanwise data for '+Name) + return None, None, None + + # --- Harmonize data so that they all have the same length + nrMax = np.max(Lengths) + ids=np.arange(nrMax) + if vr is None: + bFakeVr=True + vr_bar = ids/(nrMax-1) + else: + vr_bar=vr/R + bFakeVr=False + if (nrMax)len(vr_bar): + raise Exception('Inconsitent length between radial stations and max index present in output chanels') + + for i in np.arange(len(data)): + d=data[i] + if len(d)len(vr_bar): + raise Exception('Inconsitent length between radial stations ({:d}) and max index present in output chanels ({:d})'.format(len(vr_bar),nrMax)) + df.insert(0, 'r/R_[-]', vr_bar) + + if IR is not None: + df['Node_[#]']=IR[:nrMax] + df['i_[#]']=ids+1 + if vr is not None: + df['r_[m]'] = vr[:nrMax] + return df + +def find_matching_columns(Cols, PatternMap): + ColsInfo=[] + nrMax=0 + for colpattern,colmap in PatternMap.items(): + # Extracting columns matching pattern + cols, sIdx = find_matching_pattern(Cols, colpattern) + if len(cols)>0: + # Sorting by ID + cols = np.asarray(cols) + Idx = np.array([int(s) for s in sIdx]) + Isort = 
np.argsort(Idx) + Idx = Idx[Isort] + cols = cols[Isort] + col={'name':colmap,'Idx':Idx,'cols':cols} + nrMax=max(nrMax,np.max(Idx)) + ColsInfo.append(col) + return ColsInfo,nrMax + +def extract_spanwise_data(ColsInfo, nrMax, df=None,ts=None): + """ + Extract spanwise data based on some column info + ColsInfo: see find_matching_columns + """ + nCols = len(ColsInfo) + if nCols==0: + return None + if ts is not None: + Values = np.zeros((nrMax,nCols)) + Values[:] = np.nan + elif df is not None: + raise NotImplementedError() + + ColNames =[c['name'] for c in ColsInfo] + + for ic,c in enumerate(ColsInfo): + Idx, cols, colname = c['Idx'], c['cols'], c['name'] + for idx,col in zip(Idx,cols): + Values[idx-1,ic]=ts[col] + nMissing = np.sum(np.isnan(Values[:,ic])) + if len(cols)nrMax: + print('[WARN] More values found for {}, found {}/{}'.format(colname,len(cols),nrMax)) + df = pd.DataFrame(data=Values, columns=ColNames) + df = df.reindex(sorted(df.columns), axis=1) + return df + + +def _BDSpanMap(): + BDSpanMap=dict() + for sB in ['B1','B2','B3']: + # Old nodal outputs + BDSpanMap['^'+sB+r'N(\d)TDxr_\[m\]'] = sB+'TDxr_[m]' + BDSpanMap['^'+sB+r'N(\d)TDyr_\[m\]'] = sB+'TDyr_[m]' + BDSpanMap['^'+sB+r'N(\d)TDzr_\[m\]'] = sB+'TDzr_[m]' + # New nodal outputs + BDSpanMap['^'+sB+r'N(\d*)_FxL_\[N\]'] = sB+'FxL_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FxL_\[N\]'] = sB+'FxL_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FxL_\[N\]'] = sB+'FxL_[N]' + BDSpanMap['^'+sB+r'N(\d*)_MxL_\[N-m\]'] = sB+'MxL_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MxL_\[N-m\]'] = sB+'MxL_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MxL_\[N-m\]'] = sB+'MxL_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_Fxr_\[N\]'] = sB+'Fxr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_Fxr_\[N\]'] = sB+'Fxr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_Fxr_\[N\]'] = sB+'Fxr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_Mxr_\[N-m\]'] = sB+'Mxr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_Mxr_\[N-m\]'] = sB+'Mxr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_Mxr_\[N-m\]'] = sB+'Mxr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_TDxr_\[m\]'] = sB+'TDxr_[m]' + BDSpanMap['^'+sB+r'N(\d*)_TDyr_\[m\]'] = sB+'TDyr_[m]' + BDSpanMap['^'+sB+r'N(\d*)_TDzr_\[m\]'] = sB+'TDzr_[m]' + BDSpanMap['^'+sB+r'N(\d*)_RDxr_\[-\]'] = sB+'RDxr_[-]' + BDSpanMap['^'+sB+r'N(\d*)_RDyr_\[-\]'] = sB+'RDyr_[-]' + BDSpanMap['^'+sB+r'N(\d*)_RDzr_\[-\]'] = sB+'RDzr_[-]' + BDSpanMap['^'+sB+r'N(\d*)_AbsXg_\[m\]'] = sB+'AbsXg_[m]' + BDSpanMap['^'+sB+r'N(\d*)_AbsYg_\[m\]'] = sB+'AbsYg_[m]' + BDSpanMap['^'+sB+r'N(\d*)_AbsZg_\[m\]'] = sB+'AbsZg_[m]' + BDSpanMap['^'+sB+r'N(\d*)_AbsXr_\[m\]'] = sB+'AbsXr_[m]' + BDSpanMap['^'+sB+r'N(\d*)_AbsYr_\[m\]'] = sB+'AbsYr_[m]' + BDSpanMap['^'+sB+r'N(\d*)_AbsZr_\[m\]'] = sB+'AbsZr_[m]' + BDSpanMap['^'+sB+r'N(\d*)_TVxg_\[m/s\]'] = sB+'TVxg_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_TVyg_\[m/s\]'] = sB+'TVyg_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_TVzg_\[m/s\]'] = sB+'TVzg_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_TVxl_\[m/s\]'] = sB+'TVxl_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_TVyl_\[m/s\]'] = sB+'TVyl_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_TVzl_\[m/s\]'] = sB+'TVzl_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_TVxr_\[m/s\]'] = sB+'TVxr_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_TVyr_\[m/s\]'] = sB+'TVyr_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_TVzr_\[m/s\]'] = sB+'TVzr_[m/s]' + BDSpanMap['^'+sB+r'N(\d*)_RVxg_\[deg/s\]'] = sB+'RVxg_[deg/s]' + BDSpanMap['^'+sB+r'N(\d*)_RVyg_\[deg/s\]'] = sB+'RVyg_[deg/s]' + BDSpanMap['^'+sB+r'N(\d*)_RVzg_\[deg/s\]'] = sB+'RVzg_[deg/s]' + BDSpanMap['^'+sB+r'N(\d*)_RVxl_\[deg/s\]'] = sB+'RVxl_[deg/s]' + BDSpanMap['^'+sB+r'N(\d*)_RVyl_\[deg/s\]'] = sB+'RVyl_[deg/s]' + 
BDSpanMap['^'+sB+r'N(\d*)_RVzl_\[deg/s\]'] = sB+'RVzl_[deg/s]' + BDSpanMap['^'+sB+r'N(\d*)_RVxr_\[deg/s\]'] = sB+'RVxr_[deg/s]' + BDSpanMap['^'+sB+r'N(\d*)_RVyr_\[deg/s\]'] = sB+'RVyr_[deg/s]' + BDSpanMap['^'+sB+r'N(\d*)_RVzr_\[deg/s\]'] = sB+'RVzr_[deg/s]' + BDSpanMap['^'+sB+r'N(\d*)_TAxl_\[m/s^2\]'] = sB+'TAxl_[m/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_TAyl_\[m/s^2\]'] = sB+'TAyl_[m/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_TAzl_\[m/s^2\]'] = sB+'TAzl_[m/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_TAxr_\[m/s^2\]'] = sB+'TAxr_[m/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_TAyr_\[m/s^2\]'] = sB+'TAyr_[m/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_TAzr_\[m/s^2\]'] = sB+'TAzr_[m/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_RAxl_\[deg/s^2\]'] = sB+'RAxl_[deg/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_RAyl_\[deg/s^2\]'] = sB+'RAyl_[deg/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_RAzl_\[deg/s^2\]'] = sB+'RAzl_[deg/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_RAxr_\[deg/s^2\]'] = sB+'RAxr_[deg/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_RAyr_\[deg/s^2\]'] = sB+'RAyr_[deg/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_RAzr_\[deg/s^2\]'] = sB+'RAzr_[deg/s^2]' + BDSpanMap['^'+sB+r'N(\d*)_PFxL_\[N\]'] = sB+'PFxL_[N]' + BDSpanMap['^'+sB+r'N(\d*)_PFyL_\[N\]'] = sB+'PFyL_[N]' + BDSpanMap['^'+sB+r'N(\d*)_PFzL_\[N\]'] = sB+'PFzL_[N]' + BDSpanMap['^'+sB+r'N(\d*)_PMxL_\[N-m\]'] = sB+'PMxL_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_PMyL_\[N-m\]'] = sB+'PMyL_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_PMzL_\[N-m\]'] = sB+'PMzL_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_DFxL_\[N/m\]'] = sB+'DFxL_[N/m]' + BDSpanMap['^'+sB+r'N(\d*)_DFyL_\[N/m\]'] = sB+'DFyL_[N/m]' + BDSpanMap['^'+sB+r'N(\d*)_DFzL_\[N/m\]'] = sB+'DFzL_[N/m]' + BDSpanMap['^'+sB+r'N(\d*)_DMxL_\[N-m/m\]'] = sB+'DMxL_[N-m/m]' + BDSpanMap['^'+sB+r'N(\d*)_DMyL_\[N-m/m\]'] = sB+'DMyL_[N-m/m]' + BDSpanMap['^'+sB+r'N(\d*)_DMzL_\[N-m/m\]'] = sB+'DMzL_[N-m/m]' + BDSpanMap['^'+sB+r'N(\d*)_DFxR_\[N/m\]'] = sB+'DFxR_[N/m]' + BDSpanMap['^'+sB+r'N(\d*)_DFyR_\[N/m\]'] = sB+'DFyR_[N/m]' + BDSpanMap['^'+sB+r'N(\d*)_DFzR_\[N/m\]'] = sB+'DFzR_[N/m]' + BDSpanMap['^'+sB+r'N(\d*)_DMxR_\[N-m/m\]'] = sB+'DMxR_[N-m/m]' + BDSpanMap['^'+sB+r'N(\d*)_DMyR_\[N-m/m\]'] = sB+'DMyR_[N-m/m]' + BDSpanMap['^'+sB+r'N(\d*)_DMzR_\[N-m/m\]'] = sB+'DMzR_[N-m/m]' + BDSpanMap['^'+sB+r'N(\d*)_FFbxl_\[N\]'] = sB+'FFbxl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFbyl_\[N\]'] = sB+'FFbyl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFbzl_\[N\]'] = sB+'FFbzl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFbxr_\[N\]'] = sB+'FFbxr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFbyr_\[N\]'] = sB+'FFbyr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFbzr_\[N\]'] = sB+'FFbzr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_MFbxl_\[N-m\]'] = sB+'MFbxl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFbyl_\[N-m\]'] = sB+'MFbyl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFbzl_\[N-m\]'] = sB+'MFbzl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFbxr_\[N-m\]'] = sB+'MFbxr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFbyr_\[N-m\]'] = sB+'MFbyr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFbzr_\[N-m\]'] = sB+'MFbzr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_FFcxl_\[N\]'] = sB+'FFcxl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFcyl_\[N\]'] = sB+'FFcyl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFczl_\[N\]'] = sB+'FFczl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFcxr_\[N\]'] = sB+'FFcxr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFcyr_\[N\]'] = sB+'FFcyr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFczr_\[N\]'] = sB+'FFczr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_MFcxl_\[N-m\]'] = sB+'MFcxl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFcyl_\[N-m\]'] = sB+'MFcyl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFczl_\[N-m\]'] = sB+'MFczl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFcxr_\[N-m\]'] = sB+'MFcxr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFcyr_\[N-m\]'] 
= sB+'MFcyr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFczr_\[N-m\]'] = sB+'MFczr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_FFdxl_\[N\]'] = sB+'FFdxl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFdyl_\[N\]'] = sB+'FFdyl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFdzl_\[N\]'] = sB+'FFdzl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFdxr_\[N\]'] = sB+'FFdxr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFdyr_\[N\]'] = sB+'FFdyr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFdzr_\[N\]'] = sB+'FFdzr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_MFdxl_\[N-m\]'] = sB+'MFdxl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFdyl_\[N-m\]'] = sB+'MFdyl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFdzl_\[N-m\]'] = sB+'MFdzl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFdxr_\[N-m\]'] = sB+'MFdxr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFdyr_\[N-m\]'] = sB+'MFdyr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFdzr_\[N-m\]'] = sB+'MFdzr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_FFgxl_\[N\]'] = sB+'FFgxl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFgyl_\[N\]'] = sB+'FFgyl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFgzl_\[N\]'] = sB+'FFgzl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFgxr_\[N\]'] = sB+'FFgxr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFgyr_\[N\]'] = sB+'FFgyr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFgzr_\[N\]'] = sB+'FFgzr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_MFgxl_\[N-m\]'] = sB+'MFgxl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFgyl_\[N-m\]'] = sB+'MFgyl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFgzl_\[N-m\]'] = sB+'MFgzl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFgxr_\[N-m\]'] = sB+'MFgxr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFgyr_\[N-m\]'] = sB+'MFgyr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFgzr_\[N-m\]'] = sB+'MFgzr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_FFixl_\[N\]'] = sB+'FFixl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFiyl_\[N\]'] = sB+'FFiyl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFizl_\[N\]'] = sB+'FFizl_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFixr_\[N\]'] = sB+'FFixr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFiyr_\[N\]'] = sB+'FFiyr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_FFizr_\[N\]'] = sB+'FFizr_[N]' + BDSpanMap['^'+sB+r'N(\d*)_MFixl_\[N-m\]'] = sB+'MFixl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFiyl_\[N-m\]'] = sB+'MFiyl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFizl_\[N-m\]'] = sB+'MFizl_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFixr_\[N-m\]'] = sB+'MFixr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFiyr_\[N-m\]'] = sB+'MFiyr_[N-m]' + BDSpanMap['^'+sB+r'N(\d*)_MFizr_\[N-m\]'] = sB+'MFizr_[N-m]' + return BDSpanMap + + +def spanwiseColBD(Cols): + """ Return column info, available columns and indices that contain BD spanwise data""" + BDSpanMap = _BDSpanMap() + return find_matching_columns(Cols, BDSpanMap) + +def spanwiseColED(Cols): + """ Return column info, available columns and indices that contain ED spanwise data""" + EDSpanMap=dict() + # All Outs + for sB in ['B1','B2','B3']: + EDSpanMap['^[A]*'+sB+r'N(\d*)ALx_\[m/s^2\]' ] = sB+'ALx_[m/s^2]' + EDSpanMap['^[A]*'+sB+r'N(\d*)ALy_\[m/s^2\]' ] = sB+'ALy_[m/s^2]' + EDSpanMap['^[A]*'+sB+r'N(\d*)ALz_\[m/s^2\]' ] = sB+'ALz_[m/s^2]' + EDSpanMap['^[A]*'+sB+r'N(\d*)TDx_\[m\]' ] = sB+'TDx_[m]' + EDSpanMap['^[A]*'+sB+r'N(\d*)TDy_\[m\]' ] = sB+'TDy_[m]' + EDSpanMap['^[A]*'+sB+r'N(\d*)TDz_\[m\]' ] = sB+'TDz_[m]' + EDSpanMap['^[A]*'+sB+r'N(\d*)RDx_\[deg\]' ] = sB+'RDx_[deg]' + EDSpanMap['^[A]*'+sB+r'N(\d*)RDy_\[deg\]' ] = sB+'RDy_[deg]' + EDSpanMap['^[A]*'+sB+r'N(\d*)RDz_\[deg\]' ] = sB+'RDz_[deg]' + EDSpanMap['^[A]*'+sB+r'N(\d*)MLx_\[kN-m\]' ] = sB+'MLx_[kN-m]' + EDSpanMap['^[A]*'+sB+r'N(\d*)MLy_\[kN-m\]' ] = sB+'MLy_[kN-m]' + EDSpanMap['^[A]*'+sB+r'N(\d*)MLz_\[kN-m\]' ] = sB+'MLz_[kN-m]' + EDSpanMap['^[A]*'+sB+r'N(\d*)FLx_\[kN\]' ] = sB+'FLx_[kN]' + EDSpanMap['^[A]*'+sB+r'N(\d*)FLy_\[kN\]' ] = sB+'FLy_[kN]' 
+ EDSpanMap['^[A]*'+sB+r'N(\d*)FLz_\[kN\]' ] = sB+'FLz_[kN]' + EDSpanMap['^[A]*'+sB+r'N(\d*)FLxNT_\[kN\]' ] = sB+'FLxNT_[kN]' + EDSpanMap['^[A]*'+sB+r'N(\d*)FLyNT_\[kN\]' ] = sB+'FLyNT_[kN]' + EDSpanMap['^[A]*'+sB+r'N(\d*)FlyNT_\[kN\]' ] = sB+'FLyNT_[kN]' # <<< Unfortunate + EDSpanMap['^[A]*'+sB+r'N(\d*)MLxNT_\[kN-m\]'] = sB+'MLxNT_[kN-m]' + EDSpanMap['^[A]*'+sB+r'N(\d*)MLyNT_\[kN-m\]'] = sB+'MLyNT_[kN-m]' + # Old + for sB in ['b1','b2','b3']: + SB=sB.upper() + EDSpanMap[r'^Spn(\d)ALx'+sB+r'_\[m/s^2\]']=SB+'ALx_[m/s^2]' + EDSpanMap[r'^Spn(\d)ALy'+sB+r'_\[m/s^2\]']=SB+'ALy_[m/s^2]' + EDSpanMap[r'^Spn(\d)ALz'+sB+r'_\[m/s^2\]']=SB+'ALz_[m/s^2]' + EDSpanMap[r'^Spn(\d)TDx'+sB+r'_\[m\]' ]=SB+'TDx_[m]' + EDSpanMap[r'^Spn(\d)TDy'+sB+r'_\[m\]' ]=SB+'TDy_[m]' + EDSpanMap[r'^Spn(\d)TDz'+sB+r'_\[m\]' ]=SB+'TDz_[m]' + EDSpanMap[r'^Spn(\d)RDx'+sB+r'_\[deg\]' ]=SB+'RDx_[deg]' + EDSpanMap[r'^Spn(\d)RDy'+sB+r'_\[deg\]' ]=SB+'RDy_[deg]' + EDSpanMap[r'^Spn(\d)RDz'+sB+r'_\[deg\]' ]=SB+'RDz_[deg]' + EDSpanMap[r'^Spn(\d)FLx'+sB+r'_\[kN\]' ]=SB+'FLx_[kN]' + EDSpanMap[r'^Spn(\d)FLy'+sB+r'_\[kN\]' ]=SB+'FLy_[kN]' + EDSpanMap[r'^Spn(\d)FLz'+sB+r'_\[kN\]' ]=SB+'FLz_[kN]' + EDSpanMap[r'^Spn(\d)MLy'+sB+r'_\[kN-m\]' ]=SB+'MLx_[kN-m]' + EDSpanMap[r'^Spn(\d)MLx'+sB+r'_\[kN-m\]' ]=SB+'MLy_[kN-m]' + EDSpanMap[r'^Spn(\d)MLz'+sB+r'_\[kN-m\]' ]=SB+'MLz_[kN-m]' + return find_matching_columns(Cols, EDSpanMap) + +def spanwiseColAD(Cols): + """ Return column info, available columns and indices that contain AD spanwise data""" + ADSpanMap=dict() + for sB in ['B1','B2','B3']: + ADSpanMap['^[A]*'+sB+r'N(\d*)Alpha_\[deg\]']=sB+'Alpha_[deg]' + ADSpanMap['^[A]*'+sB+r'N(\d*)AOA_\[deg\]' ]=sB+'Alpha_[deg]' # DBGOuts + ADSpanMap['^[A]*'+sB+r'N(\d*)AxInd_\[-\]' ]=sB+'AxInd_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)TnInd_\[-\]' ]=sB+'TnInd_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)AIn_\[deg\]' ]=sB+'AxInd_[-]' # DBGOuts NOTE BUG Unit + ADSpanMap['^[A]*'+sB+r'N(\d*)ApI_\[deg\]' ]=sB+'TnInd_[-]' # DBGOuts NOTE BUG Unit + ADSpanMap['^[A]*'+sB+r'N(\d*)AIn_\[-\]' ]=sB+'AxInd_[-]' # DBGOuts + ADSpanMap['^[A]*'+sB+r'N(\d*)ApI_\[-\]' ]=sB+'TnInd_[-]' # DBGOuts + ADSpanMap['^[A]*'+sB+r'N(\d*)Uin_\[m/s\]' ]=sB+'Uin_[m/s]' # DBGOuts + ADSpanMap['^[A]*'+sB+r'N(\d*)Uit_\[m/s\]' ]=sB+'Uit_[m/s]' # DBGOuts + ADSpanMap['^[A]*'+sB+r'N(\d*)Uir_\[m/s\]' ]=sB+'Uir_[m/s]' # DBGOuts + ADSpanMap['^[A]*'+sB+r'N(\d*)Cl_\[-\]' ]=sB+'Cl_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Cd_\[-\]' ]=sB+'Cd_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Cm_\[-\]' ]=sB+'Cm_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Cx_\[-\]' ]=sB+'Cx_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Cy_\[-\]' ]=sB+'Cy_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Cn_\[-\]' ]=sB+'Cn_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Ct_\[-\]' ]=sB+'Ct_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Re_\[-\]' ]=sB+'Re_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Vrel_\[m/s\]' ]=sB+'Vrel_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Theta_\[deg\]']=sB+'Theta_[deg]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Phi_\[deg\]' ]=sB+'Phi_[deg]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Twst_\[deg\]' ]=sB+'Twst_[deg]' #DBGOuts + ADSpanMap['^[A]*'+sB+r'N(\d*)Curve_\[deg\]']=sB+'Curve_[deg]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Vindx_\[m/s\]']=sB+'Vindx_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Vindy_\[m/s\]']=sB+'Vindy_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Fx_\[N/m\]' ]=sB+'Fx_[N/m]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Fy_\[N/m\]' ]=sB+'Fy_[N/m]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Fl_\[N/m\]' ]=sB+'Fl_[N/m]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Fd_\[N/m\]' ]=sB+'Fd_[N/m]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Fn_\[N/m\]' ]=sB+'Fn_[N/m]' + 
ADSpanMap['^[A]*'+sB+r'N(\d*)Ft_\[N/m\]' ]=sB+'Ft_[N/m]' + ADSpanMap['^[A]*'+sB+r'N(\d*)VUndx_\[m/s\]']=sB+'VUndx_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)VUndy_\[m/s\]']=sB+'VUndy_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)VUndz_\[m/s\]']=sB+'VUndz_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)VDisx_\[m/s\]']=sB+'VDisx_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)VDisy_\[m/s\]']=sB+'VDisy_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)VDisz_\[m/s\]']=sB+'VDisz_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)STVx_\[m/s\]' ]=sB+'STVx_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)STVy_\[m/s\]' ]=sB+'STVy_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)STVz_\[m/s\]' ]=sB+'STVz_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Vx_\[m/s\]' ]=sB+'Vx_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Vy_\[m/s\]' ]=sB+'Vy_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Vz_\[m/s\]' ]=sB+'Vz_[m/s]' + ADSpanMap['^[A]*'+sB+r'N(\d*)DynP_\[Pa\]' ]=sB+'DynP_[Pa]' + ADSpanMap['^[A]*'+sB+r'N(\d*)M_\[-\]' ]=sB+'M_[-]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Mm_\[N-m/m\]' ]=sB+'Mm_[N-m/m]' + ADSpanMap['^[A]*'+sB+r'N(\d*)Gam_\[' ]=sB+'Gam_[m^2/s]' #DBGOuts + # --- AD 14 + ADSpanMap[r'^Alpha(\d*)_\[deg\]' ]='Alpha_[deg]' + ADSpanMap[r'^DynPres(\d*)_\[Pa\]' ]='DynPres_[Pa]' + ADSpanMap[r'^CLift(\d*)_\[-\]' ]='CLift_[-]' + ADSpanMap[r'^CDrag(\d*)_\[-\]' ]='CDrag_[-]' + ADSpanMap[r'^CNorm(\d*)_\[-\]' ]='CNorm_[-]' + ADSpanMap[r'^CTang(\d*)_\[-\]' ]='CTang_[-]' + ADSpanMap[r'^CMomt(\d*)_\[-\]' ]='CMomt_[-]' + ADSpanMap[r'^Pitch(\d*)_\[deg\]' ]='Pitch_[deg]' + ADSpanMap[r'^AxInd(\d*)_\[-\]' ]='AxInd_[-]' + ADSpanMap[r'^TanInd(\d*)_\[-\]' ]='TanInd_[-]' + ADSpanMap[r'^ForcN(\d*)_\[N\]' ]='ForcN_[N]' + ADSpanMap[r'^ForcT(\d*)_\[N\]' ]='ForcT_[N]' + ADSpanMap[r'^Pmomt(\d*)_\[N-m\]' ]='Pmomt_[N-N]' + ADSpanMap[r'^ReNum(\d*)_\[x10^6\]']='ReNum_[x10^6]' + ADSpanMap[r'^Gamma(\d*)_\[m^2/s\]']='Gamma_[m^2/s]' + + return find_matching_columns(Cols, ADSpanMap) + +def insert_extra_columns_AD(dfRad, tsAvg, vr=None, rho=None, R=None, nB=None, chord=None): + # --- Compute additional values (AD15 only) + if dfRad is None: + return None + if dfRad.shape[1]==0: + return dfRad + if chord is not None: + if vr is not None: + chord =chord[0:len(dfRad)] + for sB in ['B1','B2','B3']: + try: + vr_bar=vr/R + Fx = dfRad[sB+'Fx_[N/m]'] + U0 = tsAvg['Wind1VelX_[m/s]'] + Ct=nB*Fx/(0.5 * rho * 2 * U0**2 * np.pi * vr) + Ct[vr<0.01*R] = 0 + dfRad[sB+'Ctloc_[-]'] = Ct + CT=2*np.trapz(vr_bar*Ct,vr_bar) + dfRad[sB+'CtAvg_[-]']= CT*np.ones(vr.shape) + except: + pass + try: + dfRad[sB+'Gamma_[m^2/s]'] = 1/2 * chord* dfRad[sB+'Vrel_[m/s]'] * dfRad[sB+'Cl_[-]'] + except: + pass + try: + if not sB+'Vindx_[m/s]' in dfRad.columns: + dfRad[sB+'Vindx_[m/s]']= -dfRad[sB+'AxInd_[-]'].values * dfRad[sB+'Vx_[m/s]'].values + dfRad[sB+'Vindy_[m/s]']= dfRad[sB+'TnInd_[-]'].values * dfRad[sB+'Vy_[m/s]'].values + except: + pass + return dfRad + + + +def spanwisePostPro(FST_In=None,avgMethod='constantwindow',avgParam=5,out_ext='.outb',df=None): + """ + Postprocess FAST radial data + + INPUTS: + - FST_IN: Fast .fst input file + - avgMethod='periods', avgParam=2: average over 2 last periods, Needs Azimuth sensors!!! 
+ - avgMethod='constantwindow', avgParam=5: average over 5s of simulation + - postprofile: outputfile to write radial data + """ + # --- Opens Fast output and performs averaging + if df is None: + df = FASTOutputFile(FST_In.replace('.fst',out_ext).replace('.dvr',out_ext)).toDataFrame() + returnDF=True + else: + returnDF=False + # NOTE: spanwise script doest not support duplicate columns + df = df.loc[:,~df.columns.duplicated()] + dfAvg = averageDF(df,avgMethod=avgMethod ,avgParam=avgParam) # NOTE: average 5 last seconds + + # --- Extract info (e.g. radial positions) from Fast input file + # We don't have a .fst input file, so we'll rely on some default values for "r" + rho = 1.225 + chord = None + # --- Extract radial positions of output channels + r_AD, r_ED, r_BD, IR_AD, IR_ED, IR_BD, R, r_hub, fst = FASTRadialOutputs(FST_In, OutputCols=df.columns.values) + if R is None: + R=1 + try: + chord = fst.AD.Bld1['BldAeroNodes'][:,5] # Full span + except: + pass + try: + rho = fst.AD['Rho'] + except: + try: + rho = fst.AD['AirDens'] + except: + pass + #print('r_AD:', r_AD) + #print('r_ED:', r_ED) + #print('r_BD:', r_BD) + #print('I_AD:', IR_AD) + #print('I_ED:', IR_ED) + #print('I_BD:', IR_BD) + # --- Extract radial data and export to csv if needed + dfRad_AD = None + dfRad_ED = None + dfRad_BD = None + Cols=dfAvg.columns.values + # --- AD + ColsInfoAD, nrMaxAD = spanwiseColAD(Cols) + dfRad_AD = extract_spanwise_data(ColsInfoAD, nrMaxAD, df=None, ts=dfAvg.iloc[0]) + dfRad_AD = insert_extra_columns_AD(dfRad_AD, dfAvg.iloc[0], vr=r_AD, rho=rho, R=R, nB=3, chord=chord) + dfRad_AD = insert_radial_columns(dfRad_AD, r_AD, R=R, IR=IR_AD) + # --- ED + ColsInfoED, nrMaxED = spanwiseColED(Cols) + dfRad_ED = extract_spanwise_data(ColsInfoED, nrMaxED, df=None, ts=dfAvg.iloc[0]) + dfRad_ED = insert_radial_columns(dfRad_ED, r_ED, R=R, IR=IR_ED) + # --- BD + ColsInfoBD, nrMaxBD = spanwiseColBD(Cols) + dfRad_BD = extract_spanwise_data(ColsInfoBD, nrMaxBD, df=None, ts=dfAvg.iloc[0]) + dfRad_BD = insert_radial_columns(dfRad_BD, r_BD, R=R, IR=IR_BD) + if returnDF: + return dfRad_ED , dfRad_AD, dfRad_BD, df + else: + return dfRad_ED , dfRad_AD, dfRad_BD + + + +def spanwisePostProRows(df, FST_In=None): + """ + Returns a 3D matrix: n x nSpan x nColumn where df is of size n x nColumn + + NOTE: this is really not optimal. Spanwise columns should be extracted only once.. + """ + # --- Extract info (e.g. 
radial positions) from Fast input file + # We don't have a .fst input file, so we'll rely on some default values for "r" + rho = 1.225 + chord = None + # --- Extract radial positions of output channels + r_AD, r_ED, r_BD, IR_AD, IR_ED, IR_BD, R, r_hub, fst = FASTRadialOutputs(FST_In, OutputCols=df.columns.values) + #print('r_AD:', r_AD) + #print('r_ED:', r_ED) + #print('r_BD:', r_BD) + if R is None: + R=1 + try: + chord = fst.AD.Bld1['BldAeroNodes'][:,5] # Full span + except: + pass + try: + rho = fst.AD['Rho'] + except: + try: + rho = fst.AD['AirDens'] + except: + pass + # --- Extract radial data for each azimuthal average + M_AD=None + M_ED=None + M_BD=None + Col_AD=None + Col_ED=None + Col_BD=None + v = df.index.values + + # --- Getting Column info + Cols=df.columns.values + if r_AD is not None: + ColsInfoAD, nrMaxAD = spanwiseColAD(Cols) + if r_ED is not None: + ColsInfoED, nrMaxED = spanwiseColED(Cols) + if r_BD is not None: + ColsInfoBD, nrMaxBD = spanwiseColBD(Cols) + for i,val in enumerate(v): + if r_AD is not None: + dfRad_AD = extract_spanwise_data(ColsInfoAD, nrMaxAD, df=None, ts=df.iloc[i]) + dfRad_AD = insert_extra_columns_AD(dfRad_AD, df.iloc[i], vr=r_AD, rho=rho, R=R, nB=3, chord=chord) + dfRad_AD = insert_radial_columns(dfRad_AD, r_AD, R=R, IR=IR_AD) + if i==0: + M_AD = np.zeros((len(v), len(dfRad_AD), len(dfRad_AD.columns))) + Col_AD=dfRad_AD.columns.values + M_AD[i, :, : ] = dfRad_AD.values + if r_ED is not None and len(r_ED)>0: + dfRad_ED = extract_spanwise_data(ColsInfoED, nrMaxED, df=None, ts=df.iloc[i]) + dfRad_ED = insert_radial_columns(dfRad_ED, r_ED, R=R, IR=IR_ED) + if i==0: + M_ED = np.zeros((len(v), len(dfRad_ED), len(dfRad_ED.columns))) + Col_ED=dfRad_ED.columns.values + M_ED[i, :, : ] = dfRad_ED.values + if r_BD is not None and len(r_BD)>0: + dfRad_BD = extract_spanwise_data(ColsInfoBD, nrMaxBD, df=None, ts=df.iloc[i]) + dfRad_BD = insert_radial_columns(dfRad_BD, r_BD, R=R, IR=IR_BD) + if i==0: + M_BD = np.zeros((len(v), len(dfRad_BD), len(dfRad_BD.columns))) + Col_BD=dfRad_BD.columns.values + M_BD[i, :, : ] = dfRad_BD.values + return M_AD, Col_AD, M_ED, Col_ED, M_BD, Col_BD + + +def FASTRadialOutputs(FST_In, OutputCols=None): + """ Returns radial positions where FAST has outputs + INPUTS: + FST_In: fast input file (.fst) + OUTPUTS: + r_AD: radial positions of FAST Outputs from the rotor center + """ + R = None + r_hub =0 + r_AD = None + r_ED = None + r_BD = None + IR_ED = None + IR_AD = None + IR_BD = None + fst=None + if FST_In is not None: + fst = FASTInputDeck(FST_In, readlist=['AD','ADbld','ED','BD','BDbld']) + # NOTE: all this below should be in FASTInputDeck + if fst.version == 'F7': + # --- FAST7 + if not hasattr(fst,'AD'): + raise Exception('The AeroDyn file couldn''t be found or read, from main file: '+FST_In) + r_AD,IR_AD = AD14_BldGag(fst.AD) + R = fst.fst['TipRad'] + try: + rho = fst.AD['Rho'] + except: + rho = fst.AD['AirDens'] + else: + # --- OpenFAST 2 + R = None + + # --- ElastoDyn + if 'NumTurbines' in fst.fst.keys(): + # AeroDyn driver... 
+ if 'HubRad(1)' in fst.fst.keys(): + r_hub = fst.fst['HubRad(1)'] + else: + r_hub = fst.fst['BldHubRad_bl(1_1)'] + + elif not hasattr(fst,'ED'): + print('[WARN] The Elastodyn file couldn''t be found or read, from main file: '+FST_In) + #raise Exception('The Elastodyn file couldn''t be found or read, from main file: '+FST_In) + else: + R = fst.ED['TipRad'] + r_hub = fst.ED['HubRad'] + if fst.ED.hasNodal: + _, r_ED = ED_BldStations(fst.ED) + IR_ED =None + else: + r_ED, IR_ED = ED_BldGag(fst.ED) + + # --- BeamDyn + if fst.BD is not None: + if R is None: + R = r_BD_All[-1] # just in case ED file missing + if fst.BD.hasNodal: + r_BD = BD_BldStations(fst.BD, fst.BDbld) + else: + r_BD, IR_BD, r_BD_All = BD_BldGag(fst.BD) + r_BD= r_BD+r_hub + + # --- AeroDyn + if fst.AD is None: + print('[WARN] The AeroDyn file couldn''t be found or read, from main file: '+FST_In) + #raise Exception('The AeroDyn file couldn''t be found or read, from main file: '+FST_In) + else: + if fst.ADversion == 'AD15': + if fst.AD.Bld1 is None: + raise Exception('The AeroDyn blade file couldn''t be found or read, from main file: '+FST_In) + + if 'B1N001Cl_[-]' in OutputCols or np.any(np.char.find(list(OutputCols),'AB1N')==0): + # This was compiled with all outs + r_AD = fst.AD.Bld1['BldAeroNodes'][:,0] # Full span + r_AD += r_hub + IR_AD = None + else: + r_AD,_ = AD_BldGag(fst.AD,fst.AD.Bld1, chordOut = True) # Only at Gages locations + r_AD += r_hub + + if R is None: + # ElastoDyn was not read, we use R from AD + R = fst.AD.Bld1['BldAeroNodes'][-1,0] + + elif fst.ADversion == 'AD14': + r_AD,IR_AD = AD14_BldGag(fst.AD) + + else: + raise Exception('AeroDyn version unknown') + return r_AD, r_ED, r_BD, IR_AD, IR_ED, IR_BD, R, r_hub, fst + + + +def addToOutlist(OutList, Signals): + if not isinstance(Signals,list): + raise Exception('Signals must be a list') + for s in Signals: + ss=s.split()[0].strip().strip('"').strip('\'') + AlreadyIn = any([o.find(ss)==1 for o in OutList ]) + if not AlreadyIn: + OutList.append(s) + return OutList + + + +# --------------------------------------------------------------------------------} +# --- Generic df +# --------------------------------------------------------------------------------{ +def remap_df(df, ColMap, bColKeepNewOnly=False, inPlace=False, dataDict=None, verbose=False): + """ Add/rename columns of a dataframe, potentially perform operations between columns + + dataDict: dicitonary of data to be made available as "variable" in the column mapping + + Example: + + ColumnMap={ + 'WS_[m/s]' : '{Wind1VelX_[m/s]}' , # create a new column from existing one + 'RtTSR_[-]' : '{RtTSR_[-]} * 2 + {RtAeroCt_[-]}' , # change value of column + 'RotSpeed_[rad/s]' : '{RotSpeed_[rpm]} * 2*np.pi/60 ', # new column [rpm] -> [rad/s] + } + # Read + df = weio.read('FASTOutBin.outb').toDataFrame() + # Change columns based on formulae, potentially adding new columns + df = fastlib.remap_df(df, ColumnMap, inplace=True) + + """ + # Insert dataDict into namespace + if dataDict is not None: + for k,v in dataDict.items(): + exec('{:s} = dataDict["{:s}"]'.format(k,k)) + + + if not inPlace: + df=df.copy() + ColMapMiss=[] + ColNew=[] + RenameMap=dict() + # Loop for expressions + for k0,v in ColMap.items(): + k=k0.strip() + v=v.strip() + if v.find('{')>=0: + search_results = re.finditer(r'\{.*?\}', v) + expr=v + if verbose: + print('Attempt to insert column {:15s} with expr {}'.format(k,v)) + # For more advanced operations, we use an eval + bFail=False + for item in search_results: + col=item.group(0)[1:-1] + if col 
not in df.columns: + ColMapMiss.append(col) + bFail=True + expr=expr.replace(item.group(0),'df[\''+col+'\']') + #print(k0, '=', expr) + if not bFail: + df[k]=eval(expr) + ColNew.append(k) + else: + print('[WARN] Column not present in dataframe, cannot evaluate: ',expr) + else: + #print(k0,'=',v) + if v not in df.columns: + ColMapMiss.append(v) + print('[WARN] Column not present in dataframe: ',v) + else: + RenameMap[k]=v + + # Applying renaming only now so that expressions may be applied in any order + for k,v in RenameMap.items(): + if verbose: + print('Renaming column {:15s} > {}'.format(v,k)) + k=k.strip() + iCol = list(df.columns).index(v) + df.columns.values[iCol]=k + ColNew.append(k) + df.columns = df.columns.values # Hack to ensure columns are updated + + if len(ColMapMiss)>0: + print('[FAIL] The following columns were not found in the dataframe:',ColMapMiss) + #print('Available columns are:',df.columns.values) + + if bColKeepNewOnly: + ColNew = [c for c,_ in ColMap.items() if c in ColNew]# Making sure we respec order from user + ColKeepSafe = [c for c in ColNew if c in df.columns.values] + ColKeepMiss = [c for c in ColNew if c not in df.columns.values] + if len(ColKeepMiss)>0: + print('[WARN] Signals missing and omitted for ColKeep:\n '+'\n '.join(ColKeepMiss)) + df=df[ColKeepSafe] + return df + + +# --------------------------------------------------------------------------------} +# --- Tools for PostProcessing one or several simulations +# --------------------------------------------------------------------------------{ +def _zero_crossings(y,x=None,direction=None): + """ + Find zero-crossing points in a discrete vector, using linear interpolation. + direction: 'up' or 'down', to select only up-crossings or down-crossings + Returns: + x values xzc such that y(yzc)==0 + indexes izc, such that the zero is between y[izc] (excluded) and y[izc+1] (included) + if direction is not provided, also returns: + sign, equal to 1 for up crossing + """ + y=np.asarray(y) + if x is None: + x=np.arange(len(y)) + + if np.any((x[1:] - x[0:-1]) <= 0.0): + raise Exception('x values need to be in ascending order') + + # Indices before zero-crossing + iBef = np.where(y[1:]*y[0:-1] < 0.0)[0] + + # Find the zero crossing by linear interpolation + xzc = x[iBef] - y[iBef] * (x[iBef+1] - x[iBef]) / (y[iBef+1] - y[iBef]) + + # Selecting points that are exactly 0 and where neighbor change sign + iZero = np.where(y == 0.0)[0] + iZero = iZero[np.where((iZero > 0) & (iZero < x.size-1))] + iZero = iZero[np.where(y[iZero-1]*y[iZero+1] < 0.0)] + + # Concatenate + xzc = np.concatenate((xzc, x[iZero])) + iBef = np.concatenate((iBef, iZero)) + + # Sort + iSort = np.argsort(xzc) + xzc, iBef = xzc[iSort], iBef[iSort] + + # Return up-crossing, down crossing or both + sign = np.sign(y[iBef+1]-y[iBef]) + if direction == 'up': + I= np.where(sign==1)[0] + return xzc[I],iBef[I] + elif direction == 'down': + I= np.where(sign==-1)[0] + return xzc[I],iBef[I] + elif direction is not None: + raise Exception('Direction should be either `up` or `down`') + return xzc, iBef, sign + +def find_matching_pattern(List, pattern): + """ Return elements of a list of strings that match a pattern + and return the first matching group + """ + reg_pattern=re.compile(pattern) + MatchedElements=[] + MatchedStrings=[] + for l in List: + match=reg_pattern.search(l) + if match: + MatchedElements.append(l) + if len(match.groups(1))>0: + MatchedStrings.append(match.groups(1)[0]) + else: + MatchedStrings.append('') + return MatchedElements, MatchedStrings 
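+# --- Illustrative usage (editor's sketch, not part of the original patch) ---
+# `find_matching_pattern` is the building block used by the spanwise extraction
+# helpers in this module: it returns the column names matching a regex together
+# with the captured node index. The channel names below are hypothetical
+# OpenFAST-style outputs, shown only to document the return values:
+#
+#     cols = ['Time_[s]', 'B1N1Cl_[-]', 'B1N2Cl_[-]', 'B1N10Cl_[-]', 'RotSpeed_[rpm]']
+#     matched, idx = find_matching_pattern(cols, r'B1N(\d*)Cl_\[-\]')
+#     # matched -> ['B1N1Cl_[-]', 'B1N2Cl_[-]', 'B1N10Cl_[-]']
+#     # idx     -> ['1', '2', '10']   (strings; callers cast to int and sort)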
+
+
+
+def extractSpanTSReg(ts, col_pattern, colname, IR=None):
+    r""" Helper function to extract spanwise results, like B1N1Cl B1N2Cl etc.
+
+    Example
+        col_pattern: r'B1N(\d*)Cl_\[-\]'
+        colname    : r'B1Cl_[-]'
+    """
+    # Extracting columns matching pattern
+    cols, sIdx = find_matching_pattern(ts.keys(), col_pattern)
+    if len(cols) ==0:
+        return (None,None)
+
+    # Sorting by ID
+    cols = np.asarray(cols)
+    Idx  = np.array([int(s) for s in sIdx])
+    Isort = np.argsort(Idx)
+    Idx  = Idx[Isort]
+    cols = cols[Isort]
+
+    nrMax = np.max(Idx)
+    Values = np.zeros((nrMax,1))
+    Values[:] = np.nan
+#     if IR is None:
+#         cols   = [col_pattern.format(ir+1) for ir in range(nr)]
+#     else:
+#         cols   = [col_pattern.format(ir) for ir in IR]
+    for idx,col in zip(Idx,cols):
+        Values[idx-1]=ts[col]
+    nMissing = np.sum(np.isnan(Values))
+    if nMissing==nrMax:
+        return (None,None)
+    if len(cols)>nrMax:
+        print('[WARN] More values found for {}, found {}/{}'.format(colname,len(cols),nrMax))
+    return (colname,Values)
+
+def extractSpanTS(ts, nr, col_pattern, colname, IR=None):
+    """ Helper function to extract spanwise results, like B1N1Cl B1N2Cl etc.
+
+    Example
+        col_pattern: 'B1N{:d}Cl_[-]'
+        colname    : 'B1Cl_[-]'
+    """
+    Values=np.zeros((nr,1))
+    if IR is None:
+        cols   = [col_pattern.format(ir+1) for ir in range(nr)]
+    else:
+        cols   = [col_pattern.format(ir) for ir in IR]
+    colsExist  = [c for c in cols if c in ts.keys() ]
+    if len(colsExist)==0:
+        return (None,None)
+
+    Values = [ts[c] if c in ts.keys() else np.nan for c in cols ]
+    nMissing = np.sum(np.isnan(Values))
+    #Values = ts[cols].T
+    #nCoun=len(Values)
+    if nMissing==nr:
+        return (None,None)
+    if len(colsExist)>nr:
+        print('[WARN] More values found for {}, found {}/{}'.format(colname,len(cols),nr))
+    return (colname,Values)
+
+def radialInterpTS(df, r, varName, r_ref, blade=1, bldFmt='AB{:d}', ndFmt='N{:03d}', method='interp'):
+    """
+    Interpolate a time series at a given radial position for a given variable (varName)
+    INPUTS:
+     - df : a dataframe (typically with OpenFAST time series)
+     - r  : radial position of the node where data is to be interpolated
+     - varName: variable name (and unit) to be interpolated.
+                The dataframe column will be assumed to be "bldFmt"+"ndFmt"+varName
+     - r_ref : radial positions of the nodal data present in the dataframe
+     - bldFmt : format for blade number, e.g. 'B{:d}' or 'AB{:d}'
+     - ndFmt  : format for node number, e.g. 'N{:d}' or 'N{:03d}'
+    OUTPUT:
+     - interpolated time series
+    """
+    # --- Sanity checks
+    r_ref = np.asarray(r_ref)
+    if not np.all(r_ref[:-1] <= r_ref[1:]):
+        raise Exception('This function only works for ascending radial values')
+
+    # No extrapolation
+    if r<np.min(r_ref) or r>np.max(r_ref):
+        raise Exception('Extrapolation not supported')
+
+    # Exactly on first or last nodes
+    if r==r_ref[0]:
+        col=bldFmt.format(blade) + ndFmt.format(1) + varName
+        if col in df.columns.values:
+            return df[col]
+        else:
+            raise Exception('Column {} not found in dataframe'.format(col))
+    elif r==r_ref[-1]:
+        col=bldFmt.format(blade) + ndFmt.format(len(r_ref)) + varName
+        if col in df.columns.values:
+            return df[col]
+        else:
+            raise Exception('Column {} not found in dataframe'.format(col))
+
+    if method=='interp':
+        # Interpolation
+        iBef = np.where(r_reftStart].copy()
+
+    dfPsi= bin_mean_DF(df, psiBin, colPsi)
+    if np.any(dfPsi['Counts']<1):
+        print('[WARN] some bins have no data! 
Increase the bin size.') + + return dfPsi + + +def averageDF(df,avgMethod='periods',avgParam=None,ColMap=None,ColKeep=None,ColSort=None,stats=['mean']): + """ + See average PostPro for documentation, same interface, just does it for one dataframe + """ + def renameCol(x): + for k,v in ColMap.items(): + if x==v: + return k + return x + # Before doing the colomn map we store the time + time = df['Time_[s]'].values + timenoNA = time[~np.isnan(time)] + # Column mapping + if ColMap is not None: + ColMapMiss = [v for _,v in ColMap.items() if v not in df.columns.values] + if len(ColMapMiss)>0: + print('[WARN] Signals missing and omitted for ColMap:\n '+'\n '.join(ColMapMiss)) + df.rename(columns=renameCol,inplace=True) + ## Defining a window for stats (start time and end time) + if avgMethod.lower()=='constantwindow': + tEnd = timenoNA[-1] + if avgParam is None: + tStart=timenoNA[0] + else: + tStart =tEnd-avgParam + elif avgMethod.lower()=='periods': + # --- Using azimuth to find periods + if 'Azimuth_[deg]' not in df.columns: + raise Exception('The sensor `Azimuth_[deg]` does not appear to be in the output file. You cannot use the averaging method by `periods`, use `constantwindow` instead.') + # NOTE: potentially we could average over each period and then average + psi=df['Azimuth_[deg]'].values + _,iBef = _zero_crossings(psi-psi[-10],direction='up') + if len(iBef)==0: + _,iBef = _zero_crossings(psi-180,direction='up') + if len(iBef)==0: + print('[WARN] Not able to find a zero crossing!') + tEnd = time[-1] + iBef=[0] + else: + tEnd = time[iBef[-1]] + + if avgParam is None: + tStart=time[iBef[0]] + else: + avgParam=int(avgParam) + if len(iBef)-10: + print('[WARN] Signals missing and omitted for ColKeep:\n '+'\n '.join(ColKeepMiss)) + df=df[ColKeepSafe] + if tStart=tStart) & (time<=tEnd) & (~np.isnan(time)))[0] + iEnd = IWindow[-1] + iStart = IWindow[0] + ## Absolute and relative differences at window extremities + DeltaValuesAbs=(df.iloc[iEnd]-df.iloc[iStart]).abs() +# DeltaValuesRel=(df.iloc[iEnd]-df.iloc[iStart]).abs()/df.iloc[iEnd] + DeltaValuesRel=(df.iloc[IWindow].max()-df.iloc[IWindow].min())/df.iloc[IWindow].mean() + #EndValues=df.iloc[iEnd] + #if avgMethod.lower()=='periods_omega': + # if DeltaValuesRel['RotSpeed_[rpm]']*100>5: + # print('[WARN] Rotational speed vary more than 5% in averaging window ({}%) for simulation: {}'.format(DeltaValuesRel['RotSpeed_[rpm]']*100,f)) + ## Stats values during window + # MeanValues = df[IWindow].mean() + # StdValues = df[IWindow].std() + if 'mean' in stats: + MeanValues = pd.DataFrame(df.iloc[IWindow].mean()).transpose() + else: + raise NotImplementedError() + return MeanValues + + + +def averagePostPro(outFiles,avgMethod='periods',avgParam=None,ColMap=None,ColKeep=None,ColSort=None,stats=['mean']): + """ Opens a list of FAST output files, perform average of its signals and return a panda dataframe + For now, the scripts only computes the mean within a time window which may be a constant or a time that is a function of the rotational speed (see `avgMethod`). + The script only computes the mean for now. Other stats will be added + + `ColMap` : dictionary where the key is the new column name, and v the old column name. + Default: None, output is not sorted + NOTE: the mapping is done before sorting and `ColKeep` is applied + ColMap = {'WS':Wind1VelX_[m/s], 'RPM': 'RotSpeed_[rpm]'} + `ColKeep` : List of strings corresponding to the signals to analyse. 
+ Default: None, all columns are analysed + Example: ColKeep=['RotSpeed_[rpm]','BldPitch1_[deg]','RtAeroCp_[-]'] + or: ColKeep=list(ColMap.keys()) + `avgMethod` : string defining the method used to determine the extent of the averaging window: + - 'periods': use a number of periods(`avgParam`), determined by the azimuth. + - 'periods_omega': use a number of periods(`avgParam`), determined by the mean RPM + - 'constantwindow': the averaging window is constant (defined by `avgParam`). + `avgParam`: based on `avgMethod` it is either + - for 'periods_*': the number of revolutions for the window. + Default: None, as many period as possible are used + - for 'constantwindow': the number of seconds for the window + Default: None, full simulation length is used + """ + result=None + invalidFiles =[] + # Loop trough files and populate result + for i,f in enumerate(outFiles): + try: + df=FASTOutputFile(f).toDataFrame() + except: + invalidFiles.append(f) + continue + postpro=averageDF(df, avgMethod=avgMethod, avgParam=avgParam, ColMap=ColMap, ColKeep=ColKeep,ColSort=ColSort,stats=stats) + MeanValues=postpro # todo + if result is None: + # We create a dataframe here, now that we know the colums + columns = MeanValues.columns + result = pd.DataFrame(np.nan, index=np.arange(len(outFiles)), columns=columns) + result.iloc[i,:] = MeanValues.copy().values + + if ColSort is not None: + # Sorting + result.sort_values([ColSort],inplace=True,ascending=True) + result.reset_index(drop=True,inplace=True) + + if len(invalidFiles)==len(outFiles): + raise Exception('None of the files can be read (or exist)!') + elif len(invalidFiles)>0: + print('[WARN] There were {} missing/invalid files: {}'.format(len(invalidFiles),invalidFiles)) + + + return result + + +if __name__ == '__main__': + main() diff --git a/pydatview/fast/runner.py b/pydatview/fast/runner.py index f321a81..3a1cb57 100644 --- a/pydatview/fast/runner.py +++ b/pydatview/fast/runner.py @@ -1,175 +1,187 @@ -# --- For cmd.py -from __future__ import division, print_function -import os -import subprocess -import multiprocessing - -import collections -import glob -import pandas as pd -import numpy as np -import shutil -import stat -import re - -# --- Fast libraries -from weio.weio.fast_input_file import FASTInputFile -from weio.weio.fast_output_file import FASTOutputFile -# from pyFAST.input_output.fast_input_file import FASTInputFile -# from pyFAST.input_output.fast_output_file import FASTOutputFile - -FAST_EXE='openfast' - -# --------------------------------------------------------------------------------} -# --- Tools for executing FAST -# --------------------------------------------------------------------------------{ -# --- START cmd.py -def run_cmds(inputfiles, exe, parallel=True, showOutputs=True, nCores=None, showCommand=True): - """ Run a set of simple commands of the form `exe input_file` - By default, the commands are run in "parallel" (though the method needs to be improved) - The stdout and stderr may be displayed on screen (`showOutputs`) or hidden. - A better handling is yet required. 
- """ - Failed=[] - def _report(p): - if p.returncode==0: - print('[ OK ] Input : ',p.input_file) - else: - Failed.append(p) - print('[FAIL] Input : ',p.input_file) - print(' Directory: '+os.getcwd()) - print(' Command : '+p.cmd) - print(' Use `showOutputs=True` to debug, or run the command above.') - #out, err = p.communicate() - #print('StdOut:\n'+out) - #print('StdErr:\n'+err) - ps=[] - iProcess=0 - if nCores is None: - nCores=multiprocessing.cpu_count() - if nCores<0: - nCores=len(inputfiles)+1 - for i,f in enumerate(inputfiles): - #print('Process {}/{}: {}'.format(i+1,len(inputfiles),f)) - ps.append(run_cmd(f, exe, wait=(not parallel), showOutputs=showOutputs, showCommand=showCommand)) - iProcess += 1 - # waiting once we've filled the number of cores - # TODO: smarter method with proper queue, here processes are run by chunks - if parallel: - if iProcess==nCores: - for p in ps: - p.wait() - for p in ps: - _report(p) - ps=[] - iProcess=0 - # Extra process if not multiptle of nCores (TODO, smarter method) - for p in ps: - p.wait() - for p in ps: - _report(p) - # --- Giving a summary - if len(Failed)==0: - print('[ OK ] All simulations run successfully.') - return True - else: - print('[FAIL] {}/{} simulations failed:'.format(len(Failed),len(inputfiles))) - for p in Failed: - print(' ',p.input_file) - return False - -def run_cmd(input_file_or_arglist, exe, wait=True, showOutputs=False, showCommand=True): - """ Run a simple command of the form `exe input_file` or `exe arg1 arg2` """ - # TODO Better capture STDOUT - if isinstance(input_file_or_arglist, list): - args= [exe] + input_file_or_arglist - input_file = ' '.join(input_file_or_arglist) - input_file_abs = input_file - else: - input_file=input_file_or_arglist - if not os.path.isabs(input_file): - input_file_abs=os.path.abspath(input_file) - else: - input_file_abs=input_file - if not os.path.exists(exe): - raise Exception('Executable not found: {}'.format(exe)) - args= [exe,input_file] - #args = 'cd '+workDir+' && '+ exe +' '+basename - shell=False - if showOutputs: - STDOut= None - else: - STDOut= open(os.devnull, 'w') - if showCommand: - print('Running: '+' '.join(args)) - if wait: - class Dummy(): - pass - p=Dummy() - p.returncode=subprocess.call(args , stdout=STDOut, stderr=subprocess.STDOUT, shell=shell) - else: - p=subprocess.Popen(args, stdout=STDOut, stderr=subprocess.STDOUT, shell=shell) - # Storing some info into the process - p.cmd = ' '.join(args) - p.args = args - p.input_file = input_file - p.input_file_abs = input_file_abs - p.exe = exe - return p -# --- END cmd.py - -def run_fastfiles(fastfiles, fastExe=None, parallel=True, showOutputs=True, nCores=None, showCommand=True, reRun=True): - if fastExe is None: - fastExe=FAST_EXE - if not reRun: - # Figure out which files exist - newfiles=[] - for f in fastfiles: - base=os.path.splitext(f)[0] - if os.path.exists(base+'.outb') or os.path.exists(base+'.out'): - print('>>> Skipping existing simulation for: ',f) - pass - else: - newfiles.append(f) - fastfiles=newfiles - - return run_cmds(fastfiles, fastExe, parallel=parallel, showOutputs=showOutputs, nCores=nCores, showCommand=showCommand) - -def run_fast(input_file, fastExe=None, wait=True, showOutputs=False, showCommand=True): - if fastExe is None: - fastExe=FAST_EXE - return run_cmd(input_file, fastExe, wait=wait, showOutputs=showOutputs, showCommand=showCommand) - - -def writeBatch(batchfile, fastfiles, fastExe=None): - """ Write batch file, everything is written relative to the batch file""" - if fastExe is None: - 
fastExe=FAST_EXE - fastExe_abs = os.path.abspath(fastExe) - batchfile_abs = os.path.abspath(batchfile) - batchdir = os.path.dirname(batchfile_abs) - fastExe_rel = os.path.relpath(fastExe_abs, batchdir) - with open(batchfile,'w') as f: - for ff in fastfiles: - ff_abs = os.path.abspath(ff) - ff_rel = os.path.relpath(ff_abs, batchdir) - l = fastExe_rel + ' '+ ff_rel - f.write("%s\n" % l) - - -def removeFASTOuputs(workDir): - # Cleaning folder - for f in glob.glob(os.path.join(workDir,'*.out')): - os.remove(f) - for f in glob.glob(os.path.join(workDir,'*.outb')): - os.remove(f) - for f in glob.glob(os.path.join(workDir,'*.ech')): - os.remove(f) - for f in glob.glob(os.path.join(workDir,'*.sum')): - os.remove(f) - -if __name__=='__main__': - run_cmds(['main1.fst','main2.fst'], './Openfast.exe', parallel=True, showOutputs=False, nCores=4, showCommand=True) - pass - # --- Test of templateReplace - +# --- For cmd.py +from __future__ import division, print_function +import os +import subprocess +import multiprocessing + +import collections +import glob +import pandas as pd +import numpy as np +import shutil +import stat +import re + +# --- Fast libraries +from weio.weio.fast_input_file import FASTInputFile +from weio.weio.fast_output_file import FASTOutputFile +# from pyFAST.input_output.fast_input_file import FASTInputFile +# from pyFAST.input_output.fast_output_file import FASTOutputFile + +FAST_EXE='openfast' + +# --------------------------------------------------------------------------------} +# --- Tools for executing FAST +# --------------------------------------------------------------------------------{ +# --- START cmd.py +def run_cmds(inputfiles, exe, parallel=True, showOutputs=True, nCores=None, showCommand=True): + """ Run a set of simple commands of the form `exe input_file` + By default, the commands are run in "parallel" (though the method needs to be improved) + The stdout and stderr may be displayed on screen (`showOutputs`) or hidden. + A better handling is yet required. 
+ """ + Failed=[] + def _report(p): + if p.returncode==0: + print('[ OK ] Input : ',p.input_file) + else: + Failed.append(p) + print('[FAIL] Input : ',p.input_file) + print(' Directory: '+os.getcwd()) + print(' Command : '+p.cmd) + print(' Use `showOutputs=True` to debug, or run the command above.') + #out, err = p.communicate() + #print('StdOut:\n'+out) + #print('StdErr:\n'+err) + ps=[] + iProcess=0 + if nCores is None: + nCores=multiprocessing.cpu_count() + if nCores<0: + nCores=len(inputfiles)+1 + for i,f in enumerate(inputfiles): + #print('Process {}/{}: {}'.format(i+1,len(inputfiles),f)) + ps.append(run_cmd(f, exe, wait=(not parallel), showOutputs=showOutputs, showCommand=showCommand)) + iProcess += 1 + # waiting once we've filled the number of cores + # TODO: smarter method with proper queue, here processes are run by chunks + if parallel: + if iProcess==nCores: + for p in ps: + p.wait() + for p in ps: + _report(p) + ps=[] + iProcess=0 + # Extra process if not multiptle of nCores (TODO, smarter method) + for p in ps: + p.wait() + for p in ps: + _report(p) + # --- Giving a summary + if len(Failed)==0: + print('[ OK ] All simulations run successfully.') + return True + else: + print('[FAIL] {}/{} simulations failed:'.format(len(Failed),len(inputfiles))) + for p in Failed: + print(' ',p.input_file) + return False + +def run_cmd(input_file_or_arglist, exe, wait=True, showOutputs=False, showCommand=True): + """ Run a simple command of the form `exe input_file` or `exe arg1 arg2` """ + # TODO Better capture STDOUT + if isinstance(input_file_or_arglist, list): + args= [exe] + input_file_or_arglist + input_file = ' '.join(input_file_or_arglist) + input_file_abs = input_file + else: + input_file=input_file_or_arglist + if not os.path.isabs(input_file): + input_file_abs=os.path.abspath(input_file) + else: + input_file_abs=input_file + if not os.path.exists(exe): + raise Exception('Executable not found: {}'.format(exe)) + args= [exe,input_file] + #args = 'cd '+workDir+' && '+ exe +' '+basename + shell=False + if showOutputs: + STDOut= None + else: + STDOut= open(os.devnull, 'w') + if showCommand: + print('Running: '+' '.join(args)) + if wait: + class Dummy(): + pass + p=Dummy() + p.returncode=subprocess.call(args , stdout=STDOut, stderr=subprocess.STDOUT, shell=shell) + else: + p=subprocess.Popen(args, stdout=STDOut, stderr=subprocess.STDOUT, shell=shell) + # Storing some info into the process + p.cmd = ' '.join(args) + p.args = args + p.input_file = input_file + p.input_file_abs = input_file_abs + p.exe = exe + return p +# --- END cmd.py + +def run_fastfiles(fastfiles, fastExe=None, parallel=True, showOutputs=True, nCores=None, showCommand=True, reRun=True): + if fastExe is None: + fastExe=FAST_EXE + if not reRun: + # Figure out which files exist + newfiles=[] + for f in fastfiles: + base=os.path.splitext(f)[0] + if os.path.exists(base+'.outb') or os.path.exists(base+'.out'): + print('>>> Skipping existing simulation for: ',f) + pass + else: + newfiles.append(f) + fastfiles=newfiles + + return run_cmds(fastfiles, fastExe, parallel=parallel, showOutputs=showOutputs, nCores=nCores, showCommand=showCommand) + +def run_fast(input_file, fastExe=None, wait=True, showOutputs=False, showCommand=True): + if fastExe is None: + fastExe=FAST_EXE + return run_cmd(input_file, fastExe, wait=wait, showOutputs=showOutputs, showCommand=showCommand) + + +def writeBatch(batchfile, fastfiles, fastExe=None, nBatches=1): + """ Write batch file, everything is written relative to the batch file""" + if fastExe is 
None: + fastExe=FAST_EXE + fastExe_abs = os.path.abspath(fastExe) + batchfile_abs = os.path.abspath(batchfile) + batchdir = os.path.dirname(batchfile_abs) + fastExe_rel = os.path.relpath(fastExe_abs, batchdir) + def writeb(batchfile, fastfiles): + with open(batchfile,'w') as f: + for ff in fastfiles: + ff_abs = os.path.abspath(ff) + ff_rel = os.path.relpath(ff_abs, batchdir) + l = fastExe_rel + ' '+ ff_rel + f.write("%s\n" % l) + if nBatches==1: + writeb(batchfile, fastfiles) + else: + splits = np.array_split(fastfiles,nBatches) + base, ext = os.path.splitext(batchfile) + for i in np.arange(nBatches): + writeb(base+'_{:d}'.format(i+1) + ext, splits[i]) + + + + + + +def removeFASTOuputs(workDir): + # Cleaning folder + for f in glob.glob(os.path.join(workDir,'*.out')): + os.remove(f) + for f in glob.glob(os.path.join(workDir,'*.outb')): + os.remove(f) + for f in glob.glob(os.path.join(workDir,'*.ech')): + os.remove(f) + for f in glob.glob(os.path.join(workDir,'*.sum')): + os.remove(f) + +if __name__=='__main__': + run_cmds(['main1.fst','main2.fst'], './Openfast.exe', parallel=True, showOutputs=False, nCores=4, showCommand=True) + pass + # --- Test of templateReplace + diff --git a/pydatview/tools/curve_fitting.py b/pydatview/tools/curve_fitting.py index 69d6d34..c03f158 100644 --- a/pydatview/tools/curve_fitting.py +++ b/pydatview/tools/curve_fitting.py @@ -1,1382 +1,1384 @@ -""" -Set of tools to fit a model to data. - -The quality of a fit is usually a strong function of the initial guess. -Because of this this package contains different kind of "helpers" and "wrapper" tools. - -FUNCTIONS ---------- - -This package can help fitting using: - 1) High level functions, e.g. fit_sinusoid -OR using the `model_fit` function that handles: - 2) User defined "eval" model, e.g. the user sets a string '{a}*x + {b}*x**2' - 3) Predefined models, e.g. Gaussian, logarithmic, weibull_pdf, etc. - 4) Predefined fitters, e.g. SinusoidFitter, DiscretePolynomialFitter, ContinuousPolynomialFitter - -1) The high level fitting functions available are: - - fit_sinusoid - - fit_polynomial - - fit_gaussian - -2) User defined model, using the `model_fit_function`: - - model_fit('eval: {a} + {b}*x**3 + {c}*x**5', x, y) - - model_fit('eval: {u_ref}*(x/{z_ref})**{alpha}', x, y, p0=(8,9,0.1), bounds=(0.001,100)) - User defined models, will require the user to provide an initial guess and potentially bounds - -3) Fitting using predefined models using the `model_fit` function : - - model_fit('predef: gaussian', x, y) - - model_fit('predef: gaussian-yoff', x, y) - - model_fit('predef: powerlaw_alpha', x, y, p0=(0.1), **fun_kwargs) - - model_fit('predef: powerlaw_u_alpha', x, y, **fun_kwargs) - - model_fit('predef: expdecay', x, y) - - model_fit('predef: weibull_pdf', x, y) - Predefined models have default values for bounds and guesses that can be overriden. 
- -4) Predefined fitters, wrapped with the `model_fit` function: - - model_fit('fitter: sinusoid', x, y) - - model_fit('fitter: polynomial_discrete', x, y, exponents=[0,2,4]) - - model_fit('fitter: polynomial_continuous', x, y, order=3) - Predefined fitters can handle bounds/initial guess better - -INPUTS: --------- -All functions have the following inputs: - - x: array on the x-axis - - y: values on the y-axis (to be fitted against a model) -Additionally some functions have the following inputs: - - p0: initial values for parameters, either a string or a dict: - - string: the string is converted to a dictionary, assuming key value pairs - example: 'a=0, b=1.3' - - dictionary, then keys should corresponds to the parameters of the model - example: {'a':0, 'b':1.3} - - bounds: bounds for each parameters, either a string or a dictionary. - NOTE: pi and inf are available to set bounds - - if a string, the string is converted to a dictionary assuming key value pairs - example: 'a=(0,3), b=(-inf,pi)' - - if a dictionary, the keys should corresponds to the parameters of the model - example: {'a':(0,3), 'b':(-inf,pi)} - -OUTPUTS: --------- -All functions returns the same outputs: - - y_fit : the fit to the y data - - pfit : the list of parameters used - - fitter: a `ModelFitter` object useful to manipulate the fit, in particular: - - fitter.model: dictionary with readable versions of the parameters, formula, - function to reevaluate the fit on a different x, etc. - - fitter.data: data used for the fit - - fitter.fit_data: perform another fit using different data - -MISC ----- -High-level fitters, predefined models or fitters can be added to this class. - -""" -import numpy as np -import scipy.optimize as so -import scipy.stats as stats -import string -import re -from collections import OrderedDict -from numpy import sqrt, pi, exp, cos, sin, log, inf, arctan # for user convenience -import six - -# --------------------------------------------------------------------------------} -# --- High level fitters -# --------------------------------------------------------------------------------{ -def fit_sinusoid(x,y,physical=False): - """ Fits a sinusoid to y with formula: - if physical is False: y_fit=A*sin(omega*x+phi)+B - if physical is True: y_fit=A*sin(2*pi(f+phi/360))+B """ - y_fit, pfit, fitter = model_fit('fitter: sinusoid', x, y, physical=physical) - return y_fit, pfit, fitter - -def fit_polynomial(x, y, order=None, exponents=None): - """ Fits a polynomial to y, either: - - full up to a given order: y_fit= {a_i} x^i , i=0..order - - or using a discrete set of exponents: y_fit= {a_i} x^e[i], i=0,..len(exponents) - OPTIONAL INPUTS: - - order: integer - Maximum order of polynomial, e.g. 2: for a x**0 + b x**1 + c x**2 - - exponents: array-like - Exponents to be used. e.g. 
[0,2,5] for a x**0 + b x**2 + c x**5 - """ - if order is not None: - y_fit, pfit, fitter = model_fit('fitter: polynomial_continuous', x, y, order=order) - else: - y_fit, pfit, fitter = model_fit('fitter: polynomial_discrete', x, y, exponents=exponents) - return y_fit, pfit, fitter - -def fit_gaussian(x, y, offset=False): - """ Fits a gaussin to y, with the following formula: - offset is True : '1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2)' - offset is False: '1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2) + {y0}' - """ - if offset: - return model_fit('predef: gaussian-yoff', x, y) - else: - return model_fit('predef: gaussian', x, y) - -# --------------------------------------------------------------------------------} -# --- Simple mid level fitter -# --------------------------------------------------------------------------------{ -def fit_polynomial_continuous(x, y, order): - """Fit a polynomial with a continuous set of exponents up to a given order - - Parameters - ---------- - x,y: see `model_fit` - order: integer - Maximum order of polynomial, e.g. 2: for a x**0 + b x**1 + c x**2 - - Returns - ------- - see `model_fit` - """ - pfit = np.polyfit(x,y,order) - y_fit = np.polyval(pfit,x) - - # coeffs_dict, e.g. {'a':xxx, 'b':xxx}, formula = 'a*x + b' - variables = string.ascii_lowercase[:order+1] - coeffs_dict = OrderedDict([(var,coeff) for i,(coeff,var) in enumerate(zip(pfit,variables))]) - formula = ' + '.join(['{}*x**{}'.format(var,order-i) for i,var in enumerate(variables)]) - formula = _clean_formula(formula) - - return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula,'fitted_function':lambda xx : np.polyval(pfit,xx)} - -def fit_polynomial_discrete(x, y, exponents): - """Fit a polynomial with a discrete set of exponents - - Parameters - ---------- - x,y: see `model_fit` - exponents: array-like - Exponents to be used. e.g. 
[0,2,5] for a x**0 + b x**2 + c x**5 - - Returns - ------- - see `model_fit` - """ - #exponents=-np.sort(-np.asarray(exponents)) - X_poly=np.array([]) - for i,e in enumerate(exponents): - if i==0: - X_poly = np.array([x**e]) - else: - X_poly = np.vstack((X_poly,x**e)) - try: - pfit = np.linalg.lstsq(X_poly.T, y, rcond=None)[0] - except: - pfit = np.linalg.lstsq(X_poly.T, y) - y_fit= np.dot(pfit, X_poly) - - variables = string.ascii_lowercase[:len(exponents)] - coeffs_dict = OrderedDict([(var,coeff) for i,(coeff,var) in enumerate(zip(pfit,variables))]) - formula = ' + '.join(['{}*x**{}'.format(var,e) for var,e in zip(variables,exponents)]) - formula = _clean_formula(formula) - - return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula} - - -def fit_powerlaw_u_alpha(x, y, z_ref=100, p0=(10,0.1)): - """ - p[0] : u_ref - p[1] : alpha - """ - pfit, _ = so.curve_fit(lambda x, *p : p[0] * (x / z_ref) ** p[1], x, y, p0=p0) - y_fit = pfit[0] * (x / z_ref) ** pfit[1] - coeffs_dict=OrderedDict([('u_ref',pfit[0]),('alpha',pfit[1])]) - formula = '{u_ref} * (z / {z_ref}) ** {alpha}' - fitted_fun = lambda xx: pfit[0] * (xx / z_ref) ** pfit[1] - return y_fit, pfit, {'coeffs':coeffs_dict,'formula':formula,'fitted_function':fitted_fun} - - -# --------------------------------------------------------------------------------} -# --- Predifined functions NOTE: they need to be registered in variable `MODELS` -# --------------------------------------------------------------------------------{ -def gaussian(x, p): - """ p = (mu,sigma) """ - return 1/(p[1]*np.sqrt(2*np.pi)) * np.exp(-1/2*((x-p[0])/p[1])**2) - -def gaussian_w_offset(x, p): - """ p = (mu,sigma,y0) """ - return 1/(p[1]*np.sqrt(2*np.pi)) * np.exp(-1/2*((x-p[0])/p[1])**2) + p[2] - -def logarithmic(x, p): - """ p = (a,b) """ - return p[0]*np.log(x)+p[1] - -def powerlaw_all(x, p): - """ p = (alpha,u_ref,z_ref) """ - return p[1] * (x / p[2]) ** p[0] - -def powerlaw_alpha(x, p, u_ref=10, z_ref=100): - """ p = alpha """ - return u_ref * (x / z_ref) ** p[0] - -def powerlaw_u_alpha(x, p, z_ref=100): - """ p = (alpha, u_ref) """ - return p[1] * (x / z_ref) ** p[0] - -def expdecay(x, p, z_ref=100): - """ p = (A, k, B) formula: {A}*exp(-{k}*x)+{B} """, - return p[0]* np.exp(-p[1]*x) + p[2] - -def weibull_pdf(x, p, z_ref=100): - """ p = (A, k) formula: {k}*x**({k}-1) / {A}**{k} * np.exp(-x/{A})**{k} """, - return p[1] * x ** (p[1] - 1) / p[0] ** p[1] * np.exp(-(x / p[0]) ** p[1]) - -def sinusoid(x, p): - """ p = (A,omega,phi,B) """ - return p[0]*np.sin(p[1]*x+p[2]) + p[3] -def sinusoid_f(x, p): - """ p = (A,f,phi_deg,B) """ - return p[0]*np.sin(2*pi*(p[1]*x+p[2]/360)) + p[3] - - - -def secondorder_impulse(t, p): - """ p = (A, omega0, zeta, B, t0) """ - A, omega0, zeta, B, t0 = p - omegad = omega0 * sqrt(1-zeta**2) - phi = np.arctan2(zeta, sqrt(1-zeta**2)) - x = np.zeros(t.shape) - bp = t>=t0 - t = t[bp]-t0 - x[bp] += A * sin(omegad * t) * exp(-zeta * omega0 * t) - x+=B - return x - -def secondorder_step(t, p): - """ p = (A, omega0, zeta, B, t0) """ - A, omega0, zeta, B, t0 = p - omegad = omega0 * sqrt(1-zeta**2) - phi = np.arctan2(zeta, sqrt(1-zeta**2)) - x = np.zeros(t.shape) - bp = t>=t0 - t = t[bp]-t0 - x[bp] += A * ( 1- exp(-zeta*omega0 *t)/sqrt(1-zeta**2) * cos(omegad*t - phi)) - x+=B - return x - - -def gentorque(x, p): - """ - INPUTS: - x: generator or rotor speed - p= (RtGnSp, RtTq , Rgn2K , SlPc , SpdGenOn) - RtGnSp Rated generator speed for simple variable-speed generator control (HSS side) (rpm) - RtTq Rated generator torque/constant generator torque 
in Region 3 for simple variable-speed generator control (HSS side) (N-m) - Rgn2K Generator torque constant in Region 2 for simple variable-speed generator control (HSS side) (N-m/rpm^2) - SlPc Rated generator slip percentage in Region 2 1/2 for simple variable-speed generator control (%) - - OUTPUTS: - GenTrq: Generator torque [Nm] - - """ - - # Init - RtGnSp, RtTq , Rgn2K , SlPc, SpdGenOn = p - GenTrq=np.zeros(x.shape) - - xmin,xmax=np.min(x), np.max(x) -# if RtGnSp<(xmin+xmax)*0.4: -# return GenTrq - - # Setting up different regions - xR21_Start = RtGnSp*(1-SlPc/100) - bR0 = xSpdGenOn , x=xR21_Start , x<=RtGnSp) - bR3 = x>RtGnSp - # R21 - y1, y2 = Rgn2K*xR21_Start**2, RtTq - x1, x2 = xR21_Start , RtGnSp - m=(y2-y1)/(x2-x1) - GenTrq[bR21] = m*(x[bR21]-x1) + y1 # R21 - GenTrq[bR2] = Rgn2K * x[bR2]**2 # R2 - GenTrq[bR3] = RtTq # R3 - return GenTrq - - -MODELS =[ -# {'label':'User defined model', -# 'name':'eval:', -# 'formula':'{a}*x**2 + {b}', -# 'coeffs':None, -# 'consts':None, -# 'bounds':None }, -{'label':'Gaussian', 'handle':gaussian,'id':'predef: gaussian', -'formula':'1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2)', -'coeffs' :'mu=0, sigma=1', # Order Important -'consts' :None, -'bounds' :None}, -{'label':'Gaussian with y-offset','handle':gaussian_w_offset,'id':'predef: gaussian-yoff', -'formula':'1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2) + {y0}', -'coeffs' :'mu=0, sigma=1, y0=0', #Order Important -'consts' :None, -'bounds' :'sigma=(-inf,inf), mu=(-inf,inf), y0=(-inf,inf)'}, -{'label':'Exponential', 'handle': expdecay, 'id':'predef: expdecay', -'formula':'{A}*exp(-{k}*x)+{B}', -'coeffs' :'A=1, k=1, B=0', # Order Important -'consts' :None, -'bounds' :None}, -{'label':'Logarithmic', 'handle': logarithmic, 'id':'predef: logarithmic', -'formula':'{a}*log(x)+{b}', -'coeffs' :'a=1, b=0', # Order Important -'consts' :None, -'bounds' :None}, -{'label':'2nd order impulse/decay (manual)', 'handle': secondorder_impulse, 'id':'predef: secondorder_impulse', -'formula':'{A}*exp(-{zeta}*{omega}*(x-{x0})) * sin({omega}*sqrt(1-{zeta}**2))) +{B}', -'coeffs' :'A=1, omega=1, zeta=0.001, B=0, x0=0', # Order Important -'consts' :None, -'bounds' :'A=(-inf,inf), omega=(0,100), zeta=(0,1), B=(-inf,inf), x0=(-inf,inf)'}, -{'label':'2nd order step (manual)', 'handle': secondorder_step, 'id':'predef: secondorder_step', -'formula':'{A}*(1-exp(-{zeta}*{omega}*(x-{x0}))/sqrt(1-{zeta}**2) * cos({omega}*sqrt(1-{zeta}**2)-arctan({zeta}/sqrt(1-{zeta}**2)))) +{B}', -'coeffs' :'A=1, omega=1, zeta=0.001, B=0, x0=0', # Order Important -'consts' :None, -'bounds' :'A=(-inf,inf), omega=(0,100), zeta=(0,1), B=(-inf,inf), x0=(-inf,inf)'}, - -# --- Wind Energy -{'label':'Power law (alpha)', 'handle':powerlaw_alpha, 'id':'predef: powerlaw_alpha', -'formula':'{u_ref} * (z / {z_ref}) ** {alpha}', -'coeffs' : 'alpha=0.1', # Order important -'consts' : 'u_ref=10, z_ref=100', -'bounds' : 'alpha=(-1,1)'}, -{'label':'Power law (alpha,u)', 'handle':powerlaw_u_alpha, 'id':'predef: powerlaw_u_alpha', -'formula':'{u_ref} * (z / {z_ref}) ** {alpha}', -'coeffs': 'alpha=0.1, u_ref=10', # Order important -'consts': 'z_ref=100', -'bounds': 'u_ref=(0,inf), alpha=(-1,1)'}, -# 'powerlaw_all':{'label':'Power law (alpha,u,z)', 'handle':powerlaw_all, # NOTE: not that useful -# 'formula':'{u_ref} * (z / {z_ref}) ** {alpha}', -# 'coeffs': 'alpha=0.1, u_ref=10, z_ref=100', -# 'consts': None, -# 'bounds': 'u_ref=(0,inf), alpha=(-1,1), z_ref=(0,inf)'}, -{'label':'Weibull PDF', 'handle': weibull_pdf, 'id':'predef: weibull_pdf', 
-'formula':'{k}*x**({k}-1) / {A}**{k} * np.exp(-x/{A})**{k}', -'coeffs' :'A=1, k=1', # Order Important -'consts' :None, -'bounds' :'A=(0.1,inf), k=(0,5)'}, -{'label':'Generator Torque', 'handle': gentorque, 'id':'predef: gentorque', -'formula': '{RtGnSp} , {RtTq} , {Rgn2K} , {SlPc} , {SpdGenOn}', -'coeffs' : 'RtGnSp=100 , RtTq=1000 , Rgn2K=0.01 ,SlPc=5 , SpdGenOn=0', # Order Important -'consts' :None, -'bounds' :'RtGnSp=(0.1,inf) , RtTq=(1,inf), Rgn2K=(0.0,0.1) ,SlPc=(0,20) , SpdGenOn=(0,inf)'} -] - -# --------------------------------------------------------------------------------} -# --- Main function wrapper -# --------------------------------------------------------------------------------{ -def model_fit(func, x, y, p0=None, bounds=None, **fun_kwargs): - """ - Parameters - ---------- - func: string or function handle - - function handle - - string starting with "fitter: ": (see variable FITTERS) - - "fitter: polynomial_continuous 5' : polyfit order 5 - - "fitter: polynomial_discrete 0 2 3 ': fit polynomial of exponents 0 2 3 - - string providing an expression to evaluate, e.g.: - - "eval: {a}*x + {b}*x**2 " - - string starting with "predef": (see variable MODELS) - - "predef: powerlaw_alpha" : - - "predef: powerlaw_all" : - - "predef: gaussian " : - - x: array of x values - y: array of y values - p0: initial values for parameters, either a string or a dict: - - if a string: the string is converted to a dictionary, assuming key value pairs - example: 'a=0, b=1.3' - - if a dictionary, then keys should corresponds to the parameters of the model - example: {'a':0, 'b':1.3} - bounds: bounds for each parameters, either a string or a dictionary. - NOTE: pi and inf are available to set bounds - - if a string, the string is converted to a dictionary assuming key value pairs - example: 'a=(0,3), b=(-inf,pi)' - - if a dictionary, the keys should corresponds to the parameters of the model - example: {'a':(0,3), 'b':(-inf,pi)} - - Returns - ------- - y_fit: array with same shape as `x` - fitted data. - pfit : fitted parameters - fitter: ModelFitter object - """ - - if isinstance(func,six.string_types) and func.find('fitter:')==0: - # --- This is a high level fitter, we call the class - # The info about the class are storred in the global variable FITTERS - # See e.g. SinusoidFitter, DiscretePolynomialFitter - predef_fitters=[m['id'] for m in FITTERS] - if func not in predef_fitters: - raise Exception('Function `{}` not defined in curve_fitting module\n Available fitters: {}'.format(func,predef_fitters)) - i = predef_fitters.index(func) - FitterDict = FITTERS[i] - consts = FITTERS[i]['consts'] - args, missing = set_common_keys(consts, fun_kwargs) - if len(missing)>0: - raise Exception('Curve fitting with `{}` requires the following arguments {}. 
Missing: {}'.format(func,consts.keys(),missing)) - # Calling the class - fitter = FitterDict['handle'](x=x, y=y, p0=p0, bounds=bounds, **fun_kwargs) - else: - fitter = ModelFitter(func, x, y, p0=p0, bounds=bounds, **fun_kwargs) - - pfit = [v for _,v in fitter.model['coeffs'].items()] - return fitter.data['y_fit'], pfit , fitter - - -# --------------------------------------------------------------------------------} -# --- Main Class -# --------------------------------------------------------------------------------{ -class ModelFitter(): - def __init__(self,func=None, x=None, y=None, p0=None, bounds=None, **fun_kwargs): - - self.model={ - 'name':None, 'model_function':None, 'consts':fun_kwargs, 'formula': 'unavailable', # model signature - 'coeffs':None, 'formula_num':'unavailable', 'fitted_function':None, 'coeffs_init':p0, 'bounds':bounds, # model fitting - 'R2':None, - } - self.data={'x':x,'y':y,'y_fit':None} - - if func is None: - return - self.set_model(func, **fun_kwargs) - - # Initialize function if present - # Perform fit if data and function is present - if x is not None and y is not None: - self.fit_data(x,y,p0,bounds) - - def set_model(self,func, **fun_kwargs): - if callable(func): - # We don't have much additional info - self.model['model_function'] = func - self.model['name'] = func.__name__ - pass - - elif isinstance(func,six.string_types): - if func.find('predef:')==0: - # --- Minimization from a predefined function - predef_models=[m['id'] for m in MODELS] - if func not in predef_models: - raise Exception('Predefined function `{}` not defined in curve_fitting module\n Available functions: {}'.format(func,predef_models)) - i = predef_models.index(func) - ModelDict = MODELS[i] - self.model['model_function'] = ModelDict['handle'] - self.model['name'] = ModelDict['label'] - self.model['formula'] = ModelDict['formula'] - self.model['coeffs'] = extract_key_num(ModelDict['coeffs']) - self.model['coeffs_init'] = self.model['coeffs'].copy() - self.model['consts'] = extract_key_num(ModelDict['consts']) - self.model['bounds'] = extract_key_tuples(ModelDict['bounds']) - - elif func.find('eval:')==0: - # --- Minimization from a eval string - formula=func[5:] - # Extract coeffs {a} {b} {c}, replace by p[0] - variables, formula_eval = extract_variables(formula) - nParams=len(variables) - if nParams==0: - raise Exception('Formula should contains parameters in curly brackets, e.g.: {a}, {b}, {u_1}. No parameters found in {}'.format(formula)) - - # Check that the formula evaluates - x=np.array([1,2,5])*np.sqrt(2) # some random evaluation vector.. - p=[np.sqrt(2)/4]*nParams # some random initial conditions - try: - y=eval(formula_eval) - y=np.asarray(y) - if y.shape!=x.shape: - raise Exception('The formula does not return an array of same size as the input variable x. 
The formula must include `x`: {}'.format(formula_eval)) - except SyntaxError: - raise Exception('The formula does not evaluate, syntax error raised: {}'.format(formula_eval)) - except ZeroDivisionError: - pass - - # Creating the actual function - def func(x, p): - return eval(formula_eval) - - self.model['model_function'] = func - self.model['name'] = 'user function' - self.model['formula'] = formula - self.model['coeffs'] = OrderedDict([(k,v) for k,v in zip(variables,p)]) - self.model['coeffs_init'] = self.model['coeffs'].copy() - self.model['consts'] = {} - self.model['bounds'] = None - - else: - raise Exception('func string needs to start with `eval:` of `predef:`, func: {}'.format(func)) - else: - raise Exception('func should be string or callable') - - if fun_kwargs is None: - return - if len(fun_kwargs)==0: - return - if self.model['consts'] is None: - raise Exception('Fun_kwargs provided, but no function constants were defined') - - self.model['consts'], missing = set_common_keys(self.model['consts'], fun_kwargs ) - if len(missing)>0: - raise Exception('Curve fitting with function `{}` requires the following arguments {}. Missing: {}'.format(func.__name__,consts.keys(),missing)) - - def setup_bounds(self, bounds, nParams): - if bounds is not None: - self.model['bounds']=bounds # store in model - bounds=self.model['bounds'] # usemodel bounds as default - if bounds is not None: - if isinstance(bounds ,six.string_types): - bounds=extract_key_tuples(bounds) - - if isinstance(bounds ,dict): - if len(bounds)==0 or 'all' in bounds.keys(): - bounds=([-np.inf]*nParams,[np.inf]*nParams) - elif self.model['coeffs'] is not None: - b1=[] - b2=[] - for k in self.model['coeffs'].keys(): - if k in bounds.keys(): - b1.append(bounds[k][0]) - b2.append(bounds[k][1]) - else: - # TODO merge default bounds - raise Exception('Bounds dictionary is missing the key: `{}`'.format(k)) - bounds=(b1,b2) - else: - raise NotImplementedError('Bounds dictionary with no known model coeffs.') - else: - # so.curve_fit needs a 2-tuple - b1,b2=bounds[0],bounds[1] - if not hasattr(b1,'__len__'): - b1=[b1]*nParams - if not hasattr(b2,'__len__'): - b2=[b2]*nParams - bounds=(b1,b2) - else: - bounds=([-np.inf]*nParams,[np.inf]*nParams) - - self.model['bounds']=bounds # store in model - - def setup_guess(self, p0, bounds, nParams): - """ - Setup initial parameter values for the fit, based on what the user provided, and potentially the bounds - - INPUTS: - - p0: initial parameter values for the fit - - if a string (e.g. " a=1, b=3"), it's converted to a dict - - if a dict, the ordered keys of model['coeffs'] are used to sort p0 - - bounds: tuple of lower and upper bounds for each parameters. 
- Parameters are ordered as function of models['coeffs'] - bounds[0]: lower bounds or all parameters - bounds[1]: upper bounds or all parameters - - We can assume that the bounds are set - """ - def middleOfBounds(i): - """ return middle of bounds for parameter `i`""" - bLow = bounds[0][i] - bHigh = bounds[0][2] - if (bLow,bHigh)==(-np.inf,np.inf): - p_i=0 - elif bLow==-np.inf: - p_i = -abs(bHigh)*2 - elif bHigh== np.inf: - p_i = abs(bLow)*2 - else: - p_i = (bLow+bHigh)/2 - return p_i - - if isinstance(p0 ,six.string_types): - p0=extract_key_num(p0) - if len(p0)==0: - p0=None - - if p0 is None: - # There is some tricky logic here between the priority of bounds and coeffs - if self.model['coeffs'] is not None: - # We rely on function to give us decent init coefficients - p0 = ([v for _,v in self.model['coeffs'].items()]) - elif bounds is None: - p0 = ([0]*nParams) - else: - # use middle of bounds - p0 = [0]*nParams - for i,(b1,b2) in enumerate(zip(bounds[0],bounds[1])): - p0[i] = middleOfBounds(i) - p0 = (p0) - elif isinstance(p0,dict): - # User supplied a dictionary, we use the ordered keys of coeffs to sort p0 - p0_dict=p0.copy() - if self.model['coeffs'] is not None: - p0=[] - for k in self.model['coeffs'].keys(): - if k in p0_dict.keys(): - p0.append(p0_dict[k]) - else: - raise Exception('Guess dictionary is missing the key: `{}`'.format(k)) - else: - raise NotImplementedError('Guess dictionary with no known model coeffs.') - - - if not hasattr(p0,'__len__'): - p0=(p0,) - - # --- Last check that p0 is within bounds - if bounds is not None: - for p,k,lb,ub in zip(p0, self.model['coeffs'].keys(), bounds[0], bounds[1]): - if pub: - raise Exception('Parameter `{}` has the guess value {}, which is larger than the upper bound ({})'.format(k,p,ub)) - # TODO potentially set it as middle of bounds - - # --- Finally, store the initial guesses in the model - self.model['coeffs_init'] = p0 - - def fit(self, func, x, y, p0=None, bounds=None, **fun_kwargs): - """ Fit model defined by a function to data (x,y) """ - # Setup function - self.set_model(func, **fun_kwargs) - # Fit data to model - self.fit_data(x, y, p0, bounds) - - def clean_data(self,x,y): - x=np.asarray(x) - y=np.asarray(y) - bNaN=~np.isnan(y) - y=y[bNaN] - x=x[bNaN] - bNaN=~np.isnan(x) - y=y[bNaN] - x=x[bNaN] - self.data['x']=x - self.data['y']=y - return x,y - - def fit_data(self, x, y, p0=None, bounds=None): - """ fit data, assuming a model is already setup""" - if self.model['model_function'] is None: - raise Exception('Call set_function first') - - # Cleaning data, and store it in object - x,y=self.clean_data(x,y) - - # nParams - if isinstance(p0 ,six.string_types): - p0=extract_key_num(p0) - if len(p0)==0: - p0=None - if p0 is not None: - if hasattr(p0,'__len__'): - nParams=len(p0) - else: - nParams=1 - elif self.model['coeffs'] is not None: - nParams=len(self.model['coeffs']) - else: - raise Exception('Initial guess `p0` needs to be provided since we cant infer the size of the model coefficients.') - if self.model['coeffs'] is not None: - if len(self.model['coeffs'])!=nParams: - raise Exception('Inconsistent dimension between model guess (size {}) and the model parameters (size {})'.format(nParams,len(self.model['coeffs']))) - - # Bounds - self.setup_bounds(bounds,nParams) - - # Initial conditions - self.setup_guess(p0,self.model['bounds'],nParams) - - # Fitting - minimize_me = lambda x, *p : self.model['model_function'](x, p, **self.model['consts']) - pfit, pcov = so.curve_fit(minimize_me, x, y, p0=self.model['coeffs_init'], 
bounds=self.model['bounds']) - - # --- Reporting information about the fit (after the fit) - y_fit = self.model['model_function'](x, pfit, **self.model['consts']) - self.store_fit_info(y_fit, pfit) - - # --- Return a fitted function - self.model['fitted_function'] = lambda xx: self.model['model_function'](xx, pfit, **self.model['consts']) - - def store_fit_info(self, y_fit, pfit): - # --- Reporting information about the fit (after the fit) - self.data['y_fit']=y_fit - self.model['R2'] = rsquare(self.data['y'], y_fit) - if self.model['coeffs'] is not None: - if not isinstance(self.model['coeffs'], OrderedDict): - raise Exception('Coeffs need to be of type OrderedDict') - for k,v in zip(self.model['coeffs'].keys(), pfit): - self.model['coeffs'][k]=v - - # Replace numerical values in formula - if self.model['formula'] is not None: - formula_num=self.model['formula'] - for k,v in self.model['coeffs'].items(): - formula_num = formula_num.replace('{'+k+'}',str(v)) - for k,v in self.model['consts'].items(): - formula_num = formula_num.replace('{'+k+'}',str(v)) - self.model['formula_num'] = formula_num - - def formula_num(self, fmt=None): - """ return formula with coeffs and consts evaluted numerically""" - if fmt is None: - fmt_fun = lambda x: str(x) - elif isinstance(fmt,six.string_types): - fmt_fun = lambda x: ('{'+fmt+'}').format(x) - elif callable(fmt): - fmt_fun = fmt - formula_num=self.model['formula'] - for k,v in self.model['coeffs'].items(): - formula_num = formula_num.replace('{'+k+'}',fmt_fun(v)) - for k,v in self.model['consts'].items(): - formula_num = formula_num.replace('{'+k+'}',fmt_fun(v)) - return formula_num - - - - def plot(self, x=None, fig=None, ax=None): - if x is None: - x=self.data['x'] - - sFormula = _clean_formula(self.model['formula'],latex=True) - - import matplotlib.pyplot as plt - import matplotlib.patches as mpatches - - if fig is None: - fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) - fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) - - ax.plot(self.data['x'], self.data['y'], '.', label='Data') - ax.plot(x, self.model['fitted_function'](x), '-', label='Model ' + sFormula) - - # Add extra info to the legend - handles, labels = ax.get_legend_handles_labels() # get existing handles and labels - empty_patch = mpatches.Patch(color='none', label='Extra label') # create a patch with no color - for k,v in self.model['coeffs'].items(): - handles.append(empty_patch) # add new patches and labels to list - labels.append(r'${:s}$ = {}'.format(pretty_param(k),pretty_num_short(v))) - handles.append(empty_patch) # add new patches and labels to list - labels.append('$R^2$ = {}'.format(pretty_num_short(self.model['R2']))) - ax.legend(handles, labels) - - - #ax.set_xlabel('') - #ax.set_ylabel('') - return fig,ax - - def print_guessbounds(self): - s='' - p0 = self.model['coeffs_init'] - bounds = self.model['bounds'] - for i,(k,v) in enumerate(self.model['coeffs'].items()): - print( (pretty_num(bounds[0][i]),pretty_num(p0[i]), pretty_num(bounds[1][i])) ) - s+='{:15s}: {:10s} < {:10s} < {:10s}\n'.format(k, pretty_num(bounds[0][i]),pretty_num(p0[i]), pretty_num(bounds[1][i])) - print(s) - - - def __repr__(self): - s='<{} object> with fields:\n'.format(type(self).__name__) - s+=' - data, dictionary with keys: \n' - s+=' - x: [{} ... {}], n: {} \n'.format(self.data['x'][0],self.data['x'][-1],len(self.data['x'])) - s+=' - y: [{} ... 
{}], n: {} \n'.format(self.data['y'][0],self.data['y'][-1],len(self.data['y'])) - s+=' - model, dictionary with keys: \n' - for k,v in self.model.items(): - s=s+' - {:15s}: {}\n'.format(k,v) - return s - - -# --------------------------------------------------------------------------------} -# --- Wrapper for predefined fitters -# --------------------------------------------------------------------------------{ -class PredefinedModelFitter(ModelFitter): - def __init__(self, x=None, y=None, p0=None, bounds=None, **kwargs): - ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) # NOTE: not passing data - - self.kwargs=kwargs - - if x is not None and y is not None: - self.fit_data(x,y,p0,bounds) - - def setup_model(self): - """ - Setup model: - - guess/coeffs_init: return params in format needed for curve_fit (p0,p1,p2,p3) - - bound : bounds in format needed for curve_fit ((low0,low1,low2), (high0, high1)) - - coeffs : OrderedDict, necessary for user print - - formula : necessary for user print - """ - #self.model['coeffs'] = OrderedDict([(var,1) for i,var in enumerate(variables)]) - #self.model['formula'] = '' - #self.model['coeffs_init']=p_guess - #self.model['bounds']=bounds_guess - raise NotImplementedError('To be implemented by child class') - - def model_function(self, x, p): - raise NotImplementedError('To be implemented by child class') - - def fit_data(self, x, y, p0=None, bounds=None): - # Cleaning data - x,y=self.clean_data(x,y) - - # --- setup model - # guess initial parameters, potential bounds, and set necessary data - self.setup_model() - - # --- Minimization - minimize_me = lambda x, *p : self.model_function(x, p) - if self.model['bounds'] is None: - pfit, pcov = so.curve_fit(minimize_me, x, y, p0=self.model['coeffs_init']) - else: - pfit, pcov = so.curve_fit(minimize_me, x, y, p0=self.model['coeffs_init'], bounds=self.model['bounds']) - # --- Reporting information about the fit (after the fit) - # And Return a fitted function - y_fit = self.model_function(x, pfit) - self.model['fitted_function']=lambda xx : self.model_function(xx, pfit) - self.store_fit_info(y_fit, pfit) - - def plot_guess(self, x=None, fig=None, ax=None): - """ plotthe guess values""" - if x is None: - x=self.data['x'] - import matplotlib.pyplot as plt - if fig is None: - fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) - fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) - - p_guess = self.model['coeffs_init'] - - ax.plot(self.data['x'], self.data['y'] , '.', label='Data') - ax.plot(x, self.model_function(x,p_guess), '-', label='Model at guessed parameters') - ax.legend() - - -# --------------------------------------------------------------------------------} -# --- Predefined fitters -# --------------------------------------------------------------------------------{ -class SecondOrderFitterImpulse(PredefinedModelFitter): - - def model_function(self, x, p): - return secondorder_impulse(x, p) - - def setup_model(self): - """ p = (A, omega0, zeta, B, t0) """ - self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('zeta',0.01),('B',0),('t0',0)]) - self.model['formula'] = '{A}*exp(-{zeta}*{omega}*(x-{x0}))*sin({omega}*sqrt(1-{zeta}**2)))+{B}' - - # --- Guess Initial values - x, y = self.data['x'],self.data['y'] - # TODO use signal - dt = x[1]-x[0] - omega0 = main_frequency(x,y) - A = np.max(y) - np.min(y) - B = np.mean(y) - zeta = 0.1 - y_start = y[0]+0.01*A - bDeviate = np.argwhere(abs(y-y_start)>abs(y_start-y[0]))[0] - t0 = 
x[bDeviate[0]] - p_guess = np.array([A, omega0, zeta, B, t0]) - self.model['coeffs_init'] = p_guess - # --- Set Bounds - T = x[-1]-x[0] - dt = x[1]-x[0] - om_min = 2*np.pi/T/2 - om_max = 2*np.pi/dt/2 - b_A = (A*0.1,A*3) - b_om = (om_min,om_max) - b_zeta = (0,1) - b_B = (np.min(y),np.max(y)) - b_x0 = (np.min(x),np.max(x)) - self.model['bounds'] = ((b_A[0],b_om[0],b_zeta[0],b_B[0],b_x0[0]),(b_A[1],b_om[1],b_zeta[1],b_B[1],b_x0[1])) - #self.plot_guess(); import matplotlib.pyplot as plt; plt.show() - #self.print_guessbounds(); - -class SecondOrderFitterStep(PredefinedModelFitter): - - def model_function(self, x, p): - return secondorder_step(x, p) - - def setup_model(self): - """ p = (A, omega0, zeta, B, t0) """ - self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('zeta',0.01),('B',0),('t0',0)]) - self.model['formula'] ='{A}*(1-exp(-{zeta}*{omega}*(x-{x0}))/sqrt(1-{zeta}**2) * cos({omega}*sqrt(1-{zeta}**2)-arctan({zeta}/sqrt(1-{zeta}**2)))) +{B}' - # --- Guess Initial values - x, y = self.data['x'],self.data['y'] - # TODO use signal - omega0 = main_frequency(x,y) - A = np.max(y) - np.min(y) - B = y[0] - zeta = 0.1 - y_start = y[0]+0.01*A - bDeviate = np.argwhere(abs(y-y_start)>abs(y_start-y[0]))[0] - t0 = x[bDeviate[0]] - p_guess = np.array([A, omega0, zeta, B, t0]) - self.model['coeffs_init'] = p_guess - # --- Set Bounds - T = x[-1]-x[0] - dt = x[1]-x[0] - om_min = 2*np.pi/T/2 - om_max = 2*np.pi/dt/2 - b_A = (A*0.1,A*3) - b_om = (om_min,om_max) - b_zeta = (0,1) - b_B = (np.min(y),np.max(y)) - b_x0 = (np.min(x),np.max(x)) - self.model['bounds'] = ((b_A[0],b_om[0],b_zeta[0],b_B[0],b_x0[0]),(b_A[1],b_om[1],b_zeta[1],b_B[1],b_x0[1])) - #self.plot_guess(); import matplotlib.pyplot as plt; plt.show() - #self.print_guessbounds(); - -# --------------------------------------------------------------------------------} -# --- Predefined fitter -# --------------------------------------------------------------------------------{ -class ContinuousPolynomialFitter(ModelFitter): - def __init__(self,order=None, x=None, y=None, p0=None, bounds=None): - ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) - self.setOrder(int(order)) - if order is not None and x is not None and y is not None: - self.fit_data(x,y,p0,bounds) - - def setOrder(self, order): - self.order=order - if order is not None: - variables= string.ascii_lowercase[:order+1] - self.model['coeffs'] = OrderedDict([(var,1) for i,var in enumerate(variables)]) - formula = ' + '.join(['{}*x**{}'.format('{'+var+'}',order-i) for i,var in enumerate(variables)]) - self.model['formula'] = _clean_formula(formula) - - def fit_data(self, x, y, p0=None, bounds=None): - if self.order is None: - raise Exception('Polynomial Fitter not set, call function `setOrder` to set order') - # Cleaning data - x,y=self.clean_data(x,y) - - nParams=self.order+1 - # Bounds - self.setup_bounds(bounds, nParams) # TODO - # Initial conditions - self.setup_guess(p0, bounds, nParams) # TODO - - # Fitting - pfit = np.polyfit(x,y,self.order) - - # --- Reporting information about the fit (after the fit) - y_fit = np.polyval(pfit,x) - self.store_fit_info(y_fit, pfit) - - # --- Return a fitted function - self.model['fitted_function']=lambda xx : np.polyval(pfit,xx) - - -class DiscretePolynomialFitter(ModelFitter): - def __init__(self,exponents=None, x=None, y=None, p0=None, bounds=None): - ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) - self.setExponents(exponents) - if exponents is not None and x is not None and y is not None: - 
self.fit_data(x,y,p0,bounds) - - def setExponents(self, exponents): - self.exponents=exponents - if exponents is not None: - #exponents=-np.sort(-np.asarray(exponents)) - self.exponents=exponents - variables= string.ascii_lowercase[:len(exponents)] - self.model['coeffs'] = OrderedDict([(var,1) for i,var in enumerate(variables)]) - formula = ' + '.join(['{}*x**{}'.format('{'+var+'}',e) for var,e in zip(variables,exponents)]) - self.model['formula'] = _clean_formula(formula) - - def fit_data(self, x, y, p0=None, bounds=None): - if self.exponents is None: - raise Exception('Polynomial Fitter not set, call function `setExponents` to set exponents') - # Cleaning data, and store it in object - x,y=self.clean_data(x,y) - - nParams=len(self.exponents) - # Bounds - self.setup_bounds(bounds, nParams) # TODO - # Initial conditions - self.setup_guess(p0, bounds, nParams) # TODO - - X_poly=np.array([]) - for i,e in enumerate(self.exponents): - if i==0: - X_poly = np.array([x**e]) - else: - X_poly = np.vstack((X_poly,x**e)) - try: - pfit = np.linalg.lstsq(X_poly.T, y, rcond=None)[0] - except: - pfit = np.linalg.lstsq(X_poly.T, y) - - # --- Reporting information about the fit (after the fit) - y_fit= np.dot(pfit, X_poly) - self.store_fit_info(y_fit, pfit) - - # --- Return a fitted function - def fitted_function(xx): - y=np.zeros(xx.shape) - for i,(e,c) in enumerate(zip(self.exponents,pfit)): - y += c*x**e - return y - self.model['fitted_function']=fitted_function - - -class SinusoidFitter(ModelFitter): - def __init__(self, physical=False, x=None, y=None, p0=None, bounds=None): - ModelFitter.__init__(self, x=None, y=None, p0=p0, bounds=bounds) - #self.setOrder(int(order)) - self.physical=physical - if physical: - self.model['coeffs'] = OrderedDict([('A',1),('f',1),('phi',0),('B',0)]) - self.model['formula'] = '{A} * sin(2*pi*({f}*x + {phi}/360)) + {B}' - else: - self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('phi',0),('B',0)]) - self.model['formula'] = '{A} * sin({omega}*x + {phi}) + {B}' - - if x is not None and y is not None: - self.fit_data(x,y,p0,bounds) - - def fit_data(self, x, y, p0=None, bounds=None): - # Cleaning data - x,y=self.clean_data(x,y) - - # TODO use signal - guess_freq= main_frequency(x,y)/(2*np.pi) # [Hz] - guess_amp = np.std(y) * 2.**0.5 - guess_offset = np.mean(y) - if self.physical: - guess = np.array([guess_amp, guess_freq, 0., guess_offset]) - minimize_me = lambda x, *p : sinusoid_f(x, p) - else: - guess = np.array([guess_amp, 2.*np.pi*guess_freq, 0., guess_offset]) - minimize_me = lambda x, *p : sinusoid(x, p) - self.model['coeffs_init'] = guess - - pfit, pcov = so.curve_fit(minimize_me, x, y, p0=guess) - - # --- Reporting information about the fit (after the fit) - # And Return a fitted function - if self.physical: - y_fit = sinusoid_f(x, pfit) - self.model['fitted_function']=lambda xx : sinusoid_f(xx, pfit) - else: - y_fit = sinusoid(x, pfit) - self.model['fitted_function']=lambda xx : sinusoid(xx, pfit) - self.store_fit_info(y_fit, pfit) - - - -class GeneratorTorqueFitter(ModelFitter): - def __init__(self,x=None, y=None, p0=None, bounds=None): - ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) - -# RtGnSp, RtTq , Rgn2K , SlPc , SpdGenOn = p -# {'label':'Generator Torque', 'handle': gentorque, 'id':'predef: gentorque', -# 'formula': '{RtGnSp} , {RtTq} , {Rgn2K} , {SlPc} , {SpdGenOn}', - self.model['coeffs']= extract_key_num('RtGnSp=100 , RtTq=1000 , Rgn2K=0.01 ,SlPc=5 , SpdGenOn=0') -# 'consts' :None, -# 'bounds' :'RtGnSp=(0.1,inf) , RtTq=(1,inf), 
Rgn2K=(0.0,0.1) ,SlPc=(0,20) , SpdGenOn=(0,inf)'} - if x is not None and y is not None: - self.fit_data(x,y,p0,bounds) - - def fit_data(self, x, y, p0=None, bounds=None): - #nParams=5 - ## Bounds - #self.setup_bounds(bounds,nParams) # TODO - ## Initial conditions - #self.setup_guess(p0,bounds,nParams) # TODO - - # Cleaning data, and store it in object - x,y=self.clean_data(x,y) - - I = np.argsort(x) - x=x[I] - y=y[I] - - # Estimating deltas - xMin, xMax=np.min(x),np.max(x) - yMin, yMax=np.min(y),np.max(y) - DeltaX = (xMax-xMin)*0.02 - DeltaY = (yMax-yMin)*0.02 - - # Binning data - x_bin=np.linspace(xMin,xMax,min(200,len(x))) - x_lin=x_bin[0:-1]+np.diff(x_bin) - #y_lin=np.interp(x_lin,x,y) # TODO replace by bining - y_lin = np.histogram(y, x_bin, weights=y)[0]/ np.histogram(y, x_bin)[0] - y_lin, _, _ = stats.binned_statistic(x, y, statistic='mean', bins=x_bin) - x_lin, _, _ = stats.binned_statistic(x, x, statistic='mean', bins=x_bin) - bNaN=~np.isnan(y_lin) - y_lin=y_lin[bNaN] - x_lin=x_lin[bNaN] - - # --- Find good guess of parameters based on data - # SpdGenOn - iOn = np.where(y>0)[0][0] - SpdGenOn_0 = x[iOn] - SpdGenOn_Bnds = (max(x[iOn]-DeltaX,xMin), min(x[iOn]+DeltaX,xMax)) - # Slpc - Slpc_0 = 5 - Slpc_Bnds = (0,10) - # RtTq - RtTq_0 = yMax - RtTq_Bnds = (yMax-DeltaY, yMax+DeltaY) - # RtGnSp - iCloseRt = np.where(y>yMax*0.50)[0][0] - RtGnSp_0 = x[iCloseRt] - RtGnSp_Bnds = ( RtGnSp_0 -DeltaX*2, RtGnSp_0+DeltaX*2) - # Rgn2K - #print('>>>',SpdGenOn_0, RtGnSp_0) - bR2=np.logical_and(x>SpdGenOn_0, x ['a','b'] - The variables are replaced with p[0],..,p[n] in order of appearance - """ - regex = r"\{(.*?)\}" - matches = re.finditer(regex, sFormula, re.DOTALL) - formula_eval=sFormula - variables=[] - ivar=0 - for i, match in enumerate(matches): - for groupNum in range(0, len(match.groups())): - var = match.group(1) - if var not in variables: - variables.append(var) - formula_eval = formula_eval.replace('{'+match.group(1)+'}','p[{:d}]'.format(ivar)) - ivar+=1 - return variables, formula_eval - - -def extract_key_tuples(text): - """ - all=(0.1,-2),b=(inf,0), c=(-inf,0.3e+10) - """ - if text is None: - return {} - regex = re.compile(r'(?P[\w\-]+)=\((?P[0-9+epinf.-]*?),(?P[0-9+epinf.-]*?)\)($|,)') - return {match.group("key"): (float(match.group("value1")),float(match.group("value2"))) for match in regex.finditer(text.replace(' ',''))} - -def extract_key_num(text): - """ - all=0.1, b=inf, c=-0.3e+10 - """ - if text is None: - return {} - regex = re.compile(r'(?P[\w\-]+)=(?P[0-9+epinf.-]*?)($|,)') - return OrderedDict([(match.group("key"), float(match.group("value"))) for match in regex.finditer(text.replace(' ',''))]) - -def extract_key_miscnum(text): - """ - all=0.1, b=(inf,0), c=[-inf,0.3e+10,10,11]) - """ - def isint(s): - try: - int(s) - return True - except: - return False - - if text is None: - return {} - sp=re.compile('([\w]+)=').split(text.replace(' ','')) - if len(sp)<3: - return {} - sp=sp[1:] - keys = sp[0::2] - values = sp[1::2] - d={} - for (k,v) in zip(keys,values): - if v.find('(')>=0: - v=v.replace('(','').replace(')','') - v=v.split(',') - vect=tuple([float(val) for val in v if len(val.strip())>0]) - elif v.find('[')>=0: - v=v.replace('[','').replace(']','') - v=v.split(',') - vect=[int(val) if isint(val) else float(val) for val in v if len(val.strip())>0] # NOTE returning lists - elif v.find('True')>=0: - v=v.replace(',','').strip() - vect=True - elif v.find('False')>=0: - v=v.replace(',','').strip() - vect=False - else: - v=v.replace(',','').strip() - vect=int(v) if isint(v) 
else float(v) - d[k]=vect - return d - -def set_common_keys(dict_target, dict_source): - """ Set a dictionary using another one, missing keys in source dictionary are reported""" - keys_missing=[] - for k in dict_target.keys(): - if k in dict_source.keys(): - dict_target[k]=dict_source[k] - else: - keys_missing.append(k) - return dict_target, keys_missing - -def _clean_formula(s, latex=False): - s = s.replace('+-','-').replace('**1','').replace('*x**0','') - s = s.replace('np.','') - if latex: - #s = s.replace('{','$').replace('}','$') - s = s.replace('phi',r'\phi') - s = s.replace('alpha',r'\alpha') - s = s.replace('beta' ,r'\alpha') - s = s.replace('zeta' ,r'\zeta') - s = s.replace('mu' ,r'\mu' ) - s = s.replace('pi' ,r'\pi' ) - s = s.replace('sigma',r'\sigma') - s = s.replace('omega',r'\omega') - s = s.replace('_ref',r'_{ref}') # make this general - s = s.replace(r'(',r'{(') - s = s.replace(r')',r')}') - s = s.replace(r'**',r'^') - s = s.replace(r'*', '') - s = s.replace('sin',r'\sin') - s = s.replace('exp',r'\exp') - s = s.replace('sqrt',r'\sqrt') - s = r'$'+s+r'$' - else: - s = s.replace('{','').replace('}','') - return s - - -def main_frequency(t,y): - """ - Returns main frequency of a signal - NOTE: this tool below to welib.tools.signal, but put here for convenience - """ - dt = t[1]-t[0] # assume uniform spacing of time and frequency - om = np.fft.fftfreq(len(t), (dt))*2*np.pi - Fyy = abs(np.fft.fft(y)) - omega = abs(om[np.argmax(Fyy[1:])+1]) # exclude the zero frequency (mean) - return omega - -def rsquare(y, f): - """ Compute coefficient of determination of data fit model and RMSE - [r2] = rsquare(y,f) - RSQUARE computes the coefficient of determination (R-square) value from - actual data Y and model data F. - INPUTS - y : Actual data - f : Model fit - OUTPUT - R2 : Coefficient of determination - """ - # Compare inputs - if not np.all(y.shape == f.shape) : - raise Exception('Y and F must be the same size') - # Check for NaN - tmp = np.logical_not(np.logical_or(np.isnan(y),np.isnan(f))) - y = y[tmp] - f = f[tmp] - R2 = max(0,1-np.sum((y-f)**2)/np.sum((y-np.mean(y))** 2)) - return R2 - -def pretty_param(s): - if s in ['alpha','beta','delta','gamma','epsilon','zeta','lambda','mu','nu','pi','rho','sigma','phi','psi','omega']: - s = r'\{}'.format(s) - s = s.replace('_ref',r'_{ref}') # make this general.. 
- return s - -def pretty_num(x): - if abs(x)<1000 and abs(x)>1e-4: - return "{:9.4f}".format(x) - else: - return '{:.3e}'.format(x) - -def pretty_num_short(x,digits=3): - if digits==4: - if abs(x)<1000 and abs(x)>1e-1: - return "{:.4f}".format(x) - else: - return "{:.4e}".format(x) - elif digits==3: - if abs(x)<1000 and abs(x)>1e-1: - return "{:.3f}".format(x) - else: - return "{:.3e}".format(x) - elif digits==2: - if abs(x)<1000 and abs(x)>1e-1: - return "{:.2f}".format(x) - else: - return "{:.2e}".format(x) - - -if __name__ == '__main__': - # --- Writing example models to file for pyDatView tests - a,b,c = 2.0, 3.0, 4.0 - u_ref,z_ref,alpha=10,12,0.12 - mu,sigma=0.5,1.2 - x = np.linspace(0.1,30,20) - A,k,B=0.5,1.2,10 - y_exp=expdecay(x,(A,k,B)) - A, k = 10, 2.3, - y_weib=weibull_pdf(x,(A,k)) - y_log=logarithmic(x,(a,b)) - exponents=[0,3,5] - y_poly = a + b*x**3 + c*x**5 - y_power=powerlaw_all(x,(alpha,u_ref,z_ref)) - y_gauss=gaussian(x,(mu,sigma)) - A= 101; B= -200.5; omega = 0.4; phi = np.pi/3 - y_sin=sinusoid(x,(A,omega,phi,B)) + np.random.normal(0, 0.1, len(x)) - M=np.column_stack((x,y_poly,y_power,y_gauss,y_gauss+10,y_weib,y_exp,y_log,y_sin)) - np.savetxt('../TestFit.csv',M,header='x,poly,power,gauss,gauss_off,weib,expdecay,log,sin',delimiter=',') +""" +Set of tools to fit a model to data. + +The quality of a fit is usually a strong function of the initial guess. +Because of this this package contains different kind of "helpers" and "wrapper" tools. + +FUNCTIONS +--------- + +This package can help fitting using: + 1) High level functions, e.g. fit_sinusoid +OR using the `model_fit` function that handles: + 2) User defined "eval" model, e.g. the user sets a string '{a}*x + {b}*x**2' + 3) Predefined models, e.g. Gaussian, logarithmic, weibull_pdf, etc. + 4) Predefined fitters, e.g. SinusoidFitter, DiscretePolynomialFitter, ContinuousPolynomialFitter + +1) The high level fitting functions available are: + - fit_sinusoid + - fit_polynomial + - fit_gaussian + +2) User defined model, using the `model_fit_function`: + - model_fit('eval: {a} + {b}*x**3 + {c}*x**5', x, y) + - model_fit('eval: {u_ref}*(x/{z_ref})**{alpha}', x, y, p0=(8,9,0.1), bounds=(0.001,100)) + User defined models, will require the user to provide an initial guess and potentially bounds + +3) Fitting using predefined models using the `model_fit` function : + - model_fit('predef: gaussian', x, y) + - model_fit('predef: gaussian-yoff', x, y) + - model_fit('predef: powerlaw_alpha', x, y, p0=(0.1), **fun_kwargs) + - model_fit('predef: powerlaw_u_alpha', x, y, **fun_kwargs) + - model_fit('predef: expdecay', x, y) + - model_fit('predef: weibull_pdf', x, y) + Predefined models have default values for bounds and guesses that can be overriden. 
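+
+    Minimal sketch of 3), showing how the returned fitter can be reused (the data below is made
+    up for illustration; only `model_fit` and the 'predef: expdecay' model above are assumed):
+        x = np.linspace(0.1, 10, 50)
+        y = 3.0*np.exp(-0.5*x) + 1.0 + np.random.normal(0, 0.1, len(x))   # noisy A*exp(-k*x)+B
+        y_fit, pfit, fitter = model_fit('predef: expdecay', x, y)
+        print(fitter.model['formula_num'])               # formula with the fitted values inserted
+        y2 = fitter.model['fitted_function'](np.linspace(0, 20, 200))     # re-evaluate on a new x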
+ +4) Predefined fitters, wrapped with the `model_fit` function: + - model_fit('fitter: sinusoid', x, y) + - model_fit('fitter: polynomial_discrete', x, y, exponents=[0,2,4]) + - model_fit('fitter: polynomial_continuous', x, y, order=3) + Predefined fitters can handle bounds/initial guess better + +INPUTS: +-------- +All functions have the following inputs: + - x: array on the x-axis + - y: values on the y-axis (to be fitted against a model) +Additionally some functions have the following inputs: + - p0: initial values for parameters, either a string or a dict: + - string: the string is converted to a dictionary, assuming key value pairs + example: 'a=0, b=1.3' + - dictionary, then keys should corresponds to the parameters of the model + example: {'a':0, 'b':1.3} + - bounds: bounds for each parameters, either a string or a dictionary. + NOTE: pi and inf are available to set bounds + - if a string, the string is converted to a dictionary assuming key value pairs + example: 'a=(0,3), b=(-inf,pi)' + - if a dictionary, the keys should corresponds to the parameters of the model + example: {'a':(0,3), 'b':(-inf,pi)} + +OUTPUTS: +-------- +All functions returns the same outputs: + - y_fit : the fit to the y data + - pfit : the list of parameters used + - fitter: a `ModelFitter` object useful to manipulate the fit, in particular: + - fitter.model: dictionary with readable versions of the parameters, formula, + function to reevaluate the fit on a different x, etc. + - fitter.data: data used for the fit + - fitter.fit_data: perform another fit using different data + +MISC +---- +High-level fitters, predefined models or fitters can be added to this class. + +""" +import numpy as np +import scipy.optimize as so +import scipy.stats as stats +import string +import re +from collections import OrderedDict +from numpy import sqrt, pi, exp, cos, sin, log, inf, arctan # for user convenience +import six + +# --------------------------------------------------------------------------------} +# --- High level fitters +# --------------------------------------------------------------------------------{ +def fit_sinusoid(x,y,physical=False): + """ Fits a sinusoid to y with formula: + if physical is False: y_fit=A*sin(omega*x+phi)+B + if physical is True: y_fit=A*sin(2*pi(f+phi/360))+B """ + y_fit, pfit, fitter = model_fit('fitter: sinusoid', x, y, physical=physical) + return y_fit, pfit, fitter + +def fit_polynomial(x, y, order=None, exponents=None): + """ Fits a polynomial to y, either: + - full up to a given order: y_fit= {a_i} x^i , i=0..order + - or using a discrete set of exponents: y_fit= {a_i} x^e[i], i=0,..len(exponents) + OPTIONAL INPUTS: + - order: integer + Maximum order of polynomial, e.g. 2: for a x**0 + b x**1 + c x**2 + - exponents: array-like + Exponents to be used. e.g. 
[0,2,5] for a x**0 + b x**2 + c x**5
+    """
+    if order is not None:
+        y_fit, pfit, fitter = model_fit('fitter: polynomial_continuous', x, y, order=order)
+    else:
+        y_fit, pfit, fitter = model_fit('fitter: polynomial_discrete', x, y, exponents=exponents)
+    return y_fit, pfit, fitter
+
+def fit_gaussian(x, y, offset=False):
+    """ Fits a gaussian to y, with the following formula:
+    offset is False: '1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2)'
+    offset is True : '1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2) + {y0}'
+    """
+    if offset:
+        return model_fit('predef: gaussian-yoff', x, y)
+    else:
+        return model_fit('predef: gaussian', x, y)
+
+# --------------------------------------------------------------------------------}
+# --- Simple mid level fitter
+# --------------------------------------------------------------------------------{
+def fit_polynomial_continuous(x, y, order):
+    """Fit a polynomial with a continuous set of exponents up to a given order
+
+    Parameters
+    ----------
+    x,y: see `model_fit`
+    order: integer
+        Maximum order of polynomial, e.g. 2: for a x**0 + b x**1 + c x**2
+
+    Returns
+    -------
+    see `model_fit`
+    """
+    pfit  = np.polyfit(x,y,order)
+    y_fit = np.polyval(pfit,x)
+
+    # coeffs_dict, e.g. {'a':xxx, 'b':xxx}, formula = 'a*x + b'
+    variables   = string.ascii_lowercase[:order+1]
+    coeffs_dict = OrderedDict([(var,coeff) for i,(coeff,var) in enumerate(zip(pfit,variables))])
+    formula     = ' + '.join(['{}*x**{}'.format(var,order-i) for i,var in enumerate(variables)])
+    formula     = _clean_formula(formula)
+
+    return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula,'fitted_function':lambda xx : np.polyval(pfit,xx)}
+
+def fit_polynomial_discrete(x, y, exponents):
+    """Fit a polynomial with a discrete set of exponents
+
+    Parameters
+    ----------
+    x,y: see `model_fit`
+    exponents: array-like
+        Exponents to be used. e.g.
[0,2,5] for a x**0 + b x**2 + c x**5 + + Returns + ------- + see `model_fit` + """ + #exponents=-np.sort(-np.asarray(exponents)) + X_poly=np.array([]) + for i,e in enumerate(exponents): + if i==0: + X_poly = np.array([x**e]) + else: + X_poly = np.vstack((X_poly,x**e)) + try: + pfit = np.linalg.lstsq(X_poly.T, y, rcond=None)[0] + except: + pfit = np.linalg.lstsq(X_poly.T, y) + y_fit= np.dot(pfit, X_poly) + + variables = string.ascii_lowercase[:len(exponents)] + coeffs_dict = OrderedDict([(var,coeff) for i,(coeff,var) in enumerate(zip(pfit,variables))]) + formula = ' + '.join(['{}*x**{}'.format(var,e) for var,e in zip(variables,exponents)]) + formula = _clean_formula(formula) + + return y_fit,pfit,{'coeffs':coeffs_dict,'formula':formula} + + +def fit_powerlaw_u_alpha(x, y, z_ref=100, p0=(10,0.1)): + """ + p[0] : u_ref + p[1] : alpha + """ + pfit, _ = so.curve_fit(lambda x, *p : p[0] * (x / z_ref) ** p[1], x, y, p0=p0) + y_fit = pfit[0] * (x / z_ref) ** pfit[1] + coeffs_dict=OrderedDict([('u_ref',pfit[0]),('alpha',pfit[1])]) + formula = '{u_ref} * (z / {z_ref}) ** {alpha}' + fitted_fun = lambda xx: pfit[0] * (xx / z_ref) ** pfit[1] + return y_fit, pfit, {'coeffs':coeffs_dict,'formula':formula,'fitted_function':fitted_fun} + + +# --------------------------------------------------------------------------------} +# --- Predifined functions NOTE: they need to be registered in variable `MODELS` +# --------------------------------------------------------------------------------{ +def gaussian(x, p): + """ p = (mu,sigma) """ + return 1/(p[1]*np.sqrt(2*np.pi)) * np.exp(-1/2*((x-p[0])/p[1])**2) + +def gaussian_w_offset(x, p): + """ p = (mu,sigma,y0) """ + return 1/(p[1]*np.sqrt(2*np.pi)) * np.exp(-1/2*((x-p[0])/p[1])**2) + p[2] + +def logarithmic(x, p): + """ p = (a,b) """ + return p[0]*np.log(x)+p[1] + +def powerlaw_all(x, p): + """ p = (alpha,u_ref,z_ref) """ + return p[1] * (x / p[2]) ** p[0] + +def powerlaw_alpha(x, p, u_ref=10, z_ref=100): + """ p = alpha """ + return u_ref * (x / z_ref) ** p[0] + +def powerlaw_u_alpha(x, p, z_ref=100): + """ p = (alpha, u_ref) """ + return p[1] * (x / z_ref) ** p[0] + +def expdecay(x, p, z_ref=100): + """ p = (A, k, B) formula: {A}*exp(-{k}*x)+{B} """, + return p[0]* np.exp(-p[1]*x) + p[2] + +def weibull_pdf(x, p, z_ref=100): + """ p = (A, k) formula: {k}*x**({k}-1) / {A}**{k} * np.exp(-x/{A})**{k} """, + # NOTE: if x is 0, a divide by zero error is incountered if p[1]-1<0 + p=list(p) + return p[1] * x ** (p[1] - 1) / p[0] ** p[1] * np.exp(-(x / p[0]) ** p[1]) + +def sinusoid(x, p): + """ p = (A,omega,phi,B) """ + return p[0]*np.sin(p[1]*x+p[2]) + p[3] +def sinusoid_f(x, p): + """ p = (A,f,phi_deg,B) """ + return p[0]*np.sin(2*pi*(p[1]*x+p[2]/360)) + p[3] + + + +def secondorder_impulse(t, p): + """ p = (A, omega0, zeta, B, t0) """ + A, omega0, zeta, B, t0 = p + omegad = omega0 * sqrt(1-zeta**2) + phi = np.arctan2(zeta, sqrt(1-zeta**2)) + x = np.zeros(t.shape) + bp = t>=t0 + t = t[bp]-t0 + x[bp] += A * sin(omegad * t) * exp(-zeta * omega0 * t) + x+=B + return x + +def secondorder_step(t, p): + """ p = (A, omega0, zeta, B, t0) """ + A, omega0, zeta, B, t0 = p + omegad = omega0 * sqrt(1-zeta**2) + phi = np.arctan2(zeta, sqrt(1-zeta**2)) + x = np.zeros(t.shape) + bp = t>=t0 + t = t[bp]-t0 + x[bp] += A * ( 1- exp(-zeta*omega0 *t)/sqrt(1-zeta**2) * cos(omegad*t - phi)) + x+=B + return x + + +def gentorque(x, p): + """ + INPUTS: + x: generator or rotor speed + p= (RtGnSp, RtTq , Rgn2K , SlPc , SpdGenOn) + RtGnSp Rated generator speed for simple variable-speed generator 
control (HSS side) (rpm)
+        RtTq      Rated generator torque/constant generator torque in Region 3 for simple variable-speed generator control (HSS side) (N-m)
+        Rgn2K     Generator torque constant in Region 2 for simple variable-speed generator control (HSS side) (N-m/rpm^2)
+        SlPc      Rated generator slip percentage in Region 2 1/2 for simple variable-speed generator control (%)
+
+    OUTPUTS:
+      GenTrq: Generator torque [Nm]
+
+    """
+
+    # Init
+    RtGnSp, RtTq , Rgn2K , SlPc, SpdGenOn = p
+    GenTrq=np.zeros(x.shape)
+
+    xmin,xmax=np.min(x), np.max(x)
+#     if RtGnSp<(xmin+xmax)*0.4:
+#         return GenTrq
+
+    # Setting up different regions
+    xR21_Start = RtGnSp*(1-SlPc/100)
+    bR0      = x<SpdGenOn
+    bR2      = np.logical_and(x>SpdGenOn   , x<xR21_Start)
+    bR21     = np.logical_and(x>=xR21_Start, x<=RtGnSp)
+    bR3      = x>RtGnSp
+    # R21
+    y1, y2 = Rgn2K*xR21_Start**2, RtTq
+    x1, x2 = xR21_Start , RtGnSp
+    m=(y2-y1)/(x2-x1)
+    GenTrq[bR21] = m*(x[bR21]-x1) + y1 # R21
+    GenTrq[bR2] = Rgn2K * x[bR2]**2    # R2
+    GenTrq[bR3] = RtTq                 # R3
+    return GenTrq
+
+
+MODELS =[
+#    {'label':'User defined model',
+#    'name':'eval:',
+#    'formula':'{a}*x**2 + {b}',
+#    'coeffs':None,
+#    'consts':None,
+#    'bounds':None },
+{'label':'Gaussian', 'handle':gaussian,'id':'predef: gaussian',
+'formula':'1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2)',
+'coeffs' :'mu=0, sigma=1', # Order Important
+'consts' :None,
+'bounds' :None},
+{'label':'Gaussian with y-offset','handle':gaussian_w_offset,'id':'predef: gaussian-yoff',
+'formula':'1/({sigma}*sqrt(2*pi)) * exp(-1/2 * ((x-{mu})/{sigma})**2) + {y0}',
+'coeffs' :'mu=0, sigma=1, y0=0', #Order Important
+'consts' :None,
+'bounds' :'sigma=(-inf,inf), mu=(-inf,inf), y0=(-inf,inf)'},
+{'label':'Exponential', 'handle': expdecay, 'id':'predef: expdecay',
+'formula':'{A}*exp(-{k}*x)+{B}',
+'coeffs' :'A=1, k=1, B=0', # Order Important
+'consts' :None,
+'bounds' :None},
+{'label':'Logarithmic', 'handle': logarithmic, 'id':'predef: logarithmic',
+'formula':'{a}*log(x)+{b}',
+'coeffs' :'a=1, b=0', # Order Important
+'consts' :None,
+'bounds' :None},
+{'label':'2nd order impulse/decay (manual)', 'handle': secondorder_impulse, 'id':'predef: secondorder_impulse',
+'formula':'{A}*exp(-{zeta}*{omega}*(x-{x0})) * sin({omega}*sqrt(1-{zeta}**2))) +{B}',
+'coeffs' :'A=1, omega=1, zeta=0.001, B=0, x0=0', # Order Important
+'consts' :None,
+'bounds' :'A=(-inf,inf), omega=(0,100), zeta=(0,1), B=(-inf,inf), x0=(-inf,inf)'},
+{'label':'2nd order step (manual)', 'handle': secondorder_step, 'id':'predef: secondorder_step',
+'formula':'{A}*(1-exp(-{zeta}*{omega}*(x-{x0}))/sqrt(1-{zeta}**2) * cos({omega}*sqrt(1-{zeta}**2)-arctan({zeta}/sqrt(1-{zeta}**2)))) +{B}',
+'coeffs' :'A=1, omega=1, zeta=0.001, B=0, x0=0', # Order Important
+'consts' :None,
+'bounds' :'A=(-inf,inf), omega=(0,100), zeta=(0,1), B=(-inf,inf), x0=(-inf,inf)'},
+
+# --- Wind Energy
+{'label':'Power law (alpha)', 'handle':powerlaw_alpha, 'id':'predef: powerlaw_alpha',
+'formula':'{u_ref} * (z / {z_ref}) ** {alpha}',
+'coeffs' : 'alpha=0.1', # Order important
+'consts' : 'u_ref=10, z_ref=100',
+'bounds' : 'alpha=(-1,1)'},
+{'label':'Power law (alpha,u)', 'handle':powerlaw_u_alpha, 'id':'predef: powerlaw_u_alpha',
+'formula':'{u_ref} * (z / {z_ref}) ** {alpha}',
+'coeffs': 'alpha=0.1, u_ref=10', # Order important
+'consts': 'z_ref=100',
+'bounds': 'u_ref=(0,inf), alpha=(-1,1)'},
+# 'powerlaw_all':{'label':'Power law (alpha,u,z)', 'handle':powerlaw_all, # NOTE: not that useful
+#     'formula':'{u_ref} * (z / {z_ref}) ** {alpha}',
+#     'coeffs': 'alpha=0.1, u_ref=10, z_ref=100',
+#     'consts': None,
+#     'bounds': 'u_ref=(0,inf), alpha=(-1,1),
z_ref=(0,inf)'}, +{'label':'Weibull PDF', 'handle': weibull_pdf, 'id':'predef: weibull_pdf', +'formula':'{k}*x**({k}-1) / {A}**{k} * np.exp(-x/{A})**{k}', +'coeffs' :'A=1, k=1', # Order Important +'consts' :None, +'bounds' :'A=(0.1,inf), k=(0,5)'}, +{'label':'Generator Torque', 'handle': gentorque, 'id':'predef: gentorque', +'formula': '{RtGnSp} , {RtTq} , {Rgn2K} , {SlPc} , {SpdGenOn}', +'coeffs' : 'RtGnSp=100 , RtTq=1000 , Rgn2K=0.01 ,SlPc=5 , SpdGenOn=0', # Order Important +'consts' :None, +'bounds' :'RtGnSp=(0.1,inf) , RtTq=(1,inf), Rgn2K=(0.0,0.1) ,SlPc=(0,20) , SpdGenOn=(0,inf)'} +] + +# --------------------------------------------------------------------------------} +# --- Main function wrapper +# --------------------------------------------------------------------------------{ +def model_fit(func, x, y, p0=None, bounds=None, **fun_kwargs): + """ + Parameters + ---------- + func: string or function handle + - function handle + - string starting with "fitter: ": (see variable FITTERS) + - "fitter: polynomial_continuous 5' : polyfit order 5 + - "fitter: polynomial_discrete 0 2 3 ': fit polynomial of exponents 0 2 3 + - string providing an expression to evaluate, e.g.: + - "eval: {a}*x + {b}*x**2 " + - string starting with "predef": (see variable MODELS) + - "predef: powerlaw_alpha" : + - "predef: powerlaw_all" : + - "predef: gaussian " : + + x: array of x values + y: array of y values + p0: initial values for parameters, either a string or a dict: + - if a string: the string is converted to a dictionary, assuming key value pairs + example: 'a=0, b=1.3' + - if a dictionary, then keys should corresponds to the parameters of the model + example: {'a':0, 'b':1.3} + bounds: bounds for each parameters, either a string or a dictionary. + NOTE: pi and inf are available to set bounds + - if a string, the string is converted to a dictionary assuming key value pairs + example: 'a=(0,3), b=(-inf,pi)' + - if a dictionary, the keys should corresponds to the parameters of the model + example: {'a':(0,3), 'b':(-inf,pi)} + + Returns + ------- + y_fit: array with same shape as `x` + fitted data. + pfit : fitted parameters + fitter: ModelFitter object + """ + + if isinstance(func,six.string_types) and func.find('fitter:')==0: + # --- This is a high level fitter, we call the class + # The info about the class are storred in the global variable FITTERS + # See e.g. SinusoidFitter, DiscretePolynomialFitter + predef_fitters=[m['id'] for m in FITTERS] + if func not in predef_fitters: + raise Exception('Function `{}` not defined in curve_fitting module\n Available fitters: {}'.format(func,predef_fitters)) + i = predef_fitters.index(func) + FitterDict = FITTERS[i] + consts = FITTERS[i]['consts'] + args, missing = set_common_keys(consts, fun_kwargs) + if len(missing)>0: + raise Exception('Curve fitting with `{}` requires the following arguments {}. 
Missing: {}'.format(func,consts.keys(),missing)) + # Calling the class + fitter = FitterDict['handle'](x=x, y=y, p0=p0, bounds=bounds, **fun_kwargs) + else: + fitter = ModelFitter(func, x, y, p0=p0, bounds=bounds, **fun_kwargs) + + pfit = [v for _,v in fitter.model['coeffs'].items()] + return fitter.data['y_fit'], pfit , fitter + + +# --------------------------------------------------------------------------------} +# --- Main Class +# --------------------------------------------------------------------------------{ +class ModelFitter(): + def __init__(self,func=None, x=None, y=None, p0=None, bounds=None, **fun_kwargs): + + self.model={ + 'name':None, 'model_function':None, 'consts':fun_kwargs, 'formula': 'unavailable', # model signature + 'coeffs':None, 'formula_num':'unavailable', 'fitted_function':None, 'coeffs_init':p0, 'bounds':bounds, # model fitting + 'R2':None, + } + self.data={'x':x,'y':y,'y_fit':None} + + if func is None: + return + self.set_model(func, **fun_kwargs) + + # Initialize function if present + # Perform fit if data and function is present + if x is not None and y is not None: + self.fit_data(x,y,p0,bounds) + + def set_model(self,func, **fun_kwargs): + if callable(func): + # We don't have much additional info + self.model['model_function'] = func + self.model['name'] = func.__name__ + pass + + elif isinstance(func,six.string_types): + if func.find('predef:')==0: + # --- Minimization from a predefined function + predef_models=[m['id'] for m in MODELS] + if func not in predef_models: + raise Exception('Predefined function `{}` not defined in curve_fitting module\n Available functions: {}'.format(func,predef_models)) + i = predef_models.index(func) + ModelDict = MODELS[i] + self.model['model_function'] = ModelDict['handle'] + self.model['name'] = ModelDict['label'] + self.model['formula'] = ModelDict['formula'] + self.model['coeffs'] = extract_key_num(ModelDict['coeffs']) + self.model['coeffs_init'] = self.model['coeffs'].copy() + self.model['consts'] = extract_key_num(ModelDict['consts']) + self.model['bounds'] = extract_key_tuples(ModelDict['bounds']) + + elif func.find('eval:')==0: + # --- Minimization from a eval string + formula=func[5:] + # Extract coeffs {a} {b} {c}, replace by p[0] + variables, formula_eval = extract_variables(formula) + nParams=len(variables) + if nParams==0: + raise Exception('Formula should contains parameters in curly brackets, e.g.: {a}, {b}, {u_1}. No parameters found in {}'.format(formula)) + + # Check that the formula evaluates + x=np.array([1,2,5])*np.sqrt(2) # some random evaluation vector.. + p=[np.sqrt(2)/4]*nParams # some random initial conditions + try: + y=eval(formula_eval) + y=np.asarray(y) + if y.shape!=x.shape: + raise Exception('The formula does not return an array of same size as the input variable x. 
The formula must include `x`: {}'.format(formula_eval)) + except SyntaxError: + raise Exception('The formula does not evaluate, syntax error raised: {}'.format(formula_eval)) + except ZeroDivisionError: + pass + + # Creating the actual function + def func(x, p): + return eval(formula_eval) + + self.model['model_function'] = func + self.model['name'] = 'user function' + self.model['formula'] = formula + self.model['coeffs'] = OrderedDict([(k,v) for k,v in zip(variables,p)]) + self.model['coeffs_init'] = self.model['coeffs'].copy() + self.model['consts'] = {} + self.model['bounds'] = None + + else: + raise Exception('func string needs to start with `eval:` of `predef:`, func: {}'.format(func)) + else: + raise Exception('func should be string or callable') + + if fun_kwargs is None: + return + if len(fun_kwargs)==0: + return + if self.model['consts'] is None: + raise Exception('Fun_kwargs provided, but no function constants were defined') + + self.model['consts'], missing = set_common_keys(self.model['consts'], fun_kwargs ) + if len(missing)>0: + raise Exception('Curve fitting with function `{}` requires the following arguments {}. Missing: {}'.format(func.__name__,consts.keys(),missing)) + + def setup_bounds(self, bounds, nParams): + if bounds is not None: + self.model['bounds']=bounds # store in model + bounds=self.model['bounds'] # usemodel bounds as default + if bounds is not None: + if isinstance(bounds ,six.string_types): + bounds=extract_key_tuples(bounds) + + if isinstance(bounds ,dict): + if len(bounds)==0 or 'all' in bounds.keys(): + bounds=([-np.inf]*nParams,[np.inf]*nParams) + elif self.model['coeffs'] is not None: + b1=[] + b2=[] + for k in self.model['coeffs'].keys(): + if k in bounds.keys(): + b1.append(bounds[k][0]) + b2.append(bounds[k][1]) + else: + # TODO merge default bounds + raise Exception('Bounds dictionary is missing the key: `{}`'.format(k)) + bounds=(b1,b2) + else: + raise NotImplementedError('Bounds dictionary with no known model coeffs.') + else: + # so.curve_fit needs a 2-tuple + b1,b2=bounds[0],bounds[1] + if not hasattr(b1,'__len__'): + b1=[b1]*nParams + if not hasattr(b2,'__len__'): + b2=[b2]*nParams + bounds=(b1,b2) + else: + bounds=([-np.inf]*nParams,[np.inf]*nParams) + + self.model['bounds']=bounds # store in model + + def setup_guess(self, p0, bounds, nParams): + """ + Setup initial parameter values for the fit, based on what the user provided, and potentially the bounds + + INPUTS: + - p0: initial parameter values for the fit + - if a string (e.g. " a=1, b=3"), it's converted to a dict + - if a dict, the ordered keys of model['coeffs'] are used to sort p0 + - bounds: tuple of lower and upper bounds for each parameters. 
+
+             Parameters are ordered as function of models['coeffs']
+             bounds[0]: lower bounds for all parameters
+             bounds[1]: upper bounds for all parameters
+
+        We can assume that the bounds have already been set (see `setup_bounds`)
+        """
+        def middleOfBounds(i):
+            """ return middle of bounds for parameter `i`"""
+            bLow  = bounds[0][i]
+            bHigh = bounds[1][i]
+            if (bLow,bHigh)==(-np.inf,np.inf):
+                p_i=0
+            elif bLow==-np.inf:
+                p_i = -abs(bHigh)*2
+            elif bHigh== np.inf:
+                p_i =  abs(bLow)*2
+            else:
+                p_i = (bLow+bHigh)/2
+            return p_i
+
+        if isinstance(p0 ,six.string_types):
+            p0=extract_key_num(p0)
+            if len(p0)==0:
+                p0=None
+
+        if p0 is None:
+            # There is some tricky logic here between the priority of bounds and coeffs
+            if self.model['coeffs'] is not None:
+                # We rely on function to give us decent init coefficients
+                p0 = ([v for _,v in self.model['coeffs'].items()])
+            elif bounds is None:
+                p0 = ([0]*nParams)
+            else:
+                # use middle of bounds
+                p0 = [0]*nParams
+                for i,(b1,b2) in enumerate(zip(bounds[0],bounds[1])):
+                    p0[i] = middleOfBounds(i)
+                p0 = (p0)
+        elif isinstance(p0,dict):
+            # User supplied a dictionary, we use the ordered keys of coeffs to sort p0
+            p0_dict=p0.copy()
+            if self.model['coeffs'] is not None:
+                p0=[]
+                for k in self.model['coeffs'].keys():
+                    if k in p0_dict.keys():
+                        p0.append(p0_dict[k])
+                    else:
+                        raise Exception('Guess dictionary is missing the key: `{}`'.format(k))
+            else:
+                raise NotImplementedError('Guess dictionary with no known model coeffs.')
+
+
+        if not hasattr(p0,'__len__'):
+            p0=(p0,)
+
+        # --- Last check that p0 is within bounds
+        if bounds is not None:
+            for p,k,lb,ub in zip(p0, self.model['coeffs'].keys(), bounds[0], bounds[1]):
+                if p<lb:
+                    raise Exception('Parameter `{}` has the guess value {}, which is smaller than the lower bound ({})'.format(k,p,lb))
+                if p>ub:
+                    raise Exception('Parameter `{}` has the guess value {}, which is larger than the upper bound ({})'.format(k,p,ub))
+            # TODO potentially set it as middle of bounds
+
+        # --- Finally, store the initial guesses in the model
+        self.model['coeffs_init'] = p0
+
+    def fit(self, func, x, y, p0=None, bounds=None, **fun_kwargs):
+        """ Fit model defined by a function to data (x,y) """
+        # Setup function
+        self.set_model(func, **fun_kwargs)
+        # Fit data to model
+        self.fit_data(x, y, p0, bounds)
+
+    def clean_data(self,x,y):
+        x=np.asarray(x)
+        y=np.asarray(y)
+        bNaN=~np.isnan(y)
+        y=y[bNaN]
+        x=x[bNaN]
+        bNaN=~np.isnan(x)
+        y=y[bNaN]
+        x=x[bNaN]
+        self.data['x']=x
+        self.data['y']=y
+        return x,y
+
+    def fit_data(self, x, y, p0=None, bounds=None):
+        """ fit data, assuming a model is already setup"""
+        if self.model['model_function'] is None:
+            raise Exception('Call set_model first')
+
+        # Cleaning data, and store it in object
+        x,y=self.clean_data(x,y)
+
+        # nParams
+        if isinstance(p0 ,six.string_types):
+            p0=extract_key_num(p0)
+            if len(p0)==0:
+                p0=None
+        if p0 is not None:
+            if hasattr(p0,'__len__'):
+                nParams=len(p0)
+            else:
+                nParams=1
+        elif self.model['coeffs'] is not None:
+            nParams=len(self.model['coeffs'])
+        else:
+            raise Exception('Initial guess `p0` needs to be provided since we cannot infer the size of the model coefficients.')
+        if self.model['coeffs'] is not None:
+            if len(self.model['coeffs'])!=nParams:
+                raise Exception('Inconsistent dimension between model guess (size {}) and the model parameters (size {})'.format(nParams,len(self.model['coeffs'])))
+
+        # Bounds
+        self.setup_bounds(bounds,nParams)
+
+        # Initial conditions
+        self.setup_guess(p0,self.model['bounds'],nParams)
+
+        # Fitting
+        minimize_me = lambda x, *p : self.model['model_function'](x, p, **self.model['consts'])
+        pfit, pcov = so.curve_fit(minimize_me, x, y, p0=self.model['coeffs_init'], 
bounds=self.model['bounds']) + + # --- Reporting information about the fit (after the fit) + y_fit = self.model['model_function'](x, pfit, **self.model['consts']) + self.store_fit_info(y_fit, pfit) + + # --- Return a fitted function + self.model['fitted_function'] = lambda xx: self.model['model_function'](xx, pfit, **self.model['consts']) + + def store_fit_info(self, y_fit, pfit): + # --- Reporting information about the fit (after the fit) + self.data['y_fit']=y_fit + self.model['R2'] = rsquare(self.data['y'], y_fit) + if self.model['coeffs'] is not None: + if not isinstance(self.model['coeffs'], OrderedDict): + raise Exception('Coeffs need to be of type OrderedDict') + for k,v in zip(self.model['coeffs'].keys(), pfit): + self.model['coeffs'][k]=v + + # Replace numerical values in formula + if self.model['formula'] is not None: + formula_num=self.model['formula'] + for k,v in self.model['coeffs'].items(): + formula_num = formula_num.replace('{'+k+'}',str(v)) + for k,v in self.model['consts'].items(): + formula_num = formula_num.replace('{'+k+'}',str(v)) + self.model['formula_num'] = formula_num + + def formula_num(self, fmt=None): + """ return formula with coeffs and consts evaluted numerically""" + if fmt is None: + fmt_fun = lambda x: str(x) + elif isinstance(fmt,six.string_types): + fmt_fun = lambda x: ('{'+fmt+'}').format(x) + elif callable(fmt): + fmt_fun = fmt + formula_num=self.model['formula'] + for k,v in self.model['coeffs'].items(): + formula_num = formula_num.replace('{'+k+'}',fmt_fun(v)) + for k,v in self.model['consts'].items(): + formula_num = formula_num.replace('{'+k+'}',fmt_fun(v)) + return formula_num + + + + def plot(self, x=None, fig=None, ax=None): + if x is None: + x=self.data['x'] + + sFormula = _clean_formula(self.model['formula'],latex=True) + + import matplotlib.pyplot as plt + import matplotlib.patches as mpatches + + if fig is None: + fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) + fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) + + ax.plot(self.data['x'], self.data['y'], '.', label='Data') + ax.plot(x, self.model['fitted_function'](x), '-', label='Model ' + sFormula) + + # Add extra info to the legend + handles, labels = ax.get_legend_handles_labels() # get existing handles and labels + empty_patch = mpatches.Patch(color='none', label='Extra label') # create a patch with no color + for k,v in self.model['coeffs'].items(): + handles.append(empty_patch) # add new patches and labels to list + labels.append(r'${:s}$ = {}'.format(pretty_param(k),pretty_num_short(v))) + handles.append(empty_patch) # add new patches and labels to list + labels.append('$R^2$ = {}'.format(pretty_num_short(self.model['R2']))) + ax.legend(handles, labels) + + + #ax.set_xlabel('') + #ax.set_ylabel('') + return fig,ax + + def print_guessbounds(self): + s='' + p0 = self.model['coeffs_init'] + bounds = self.model['bounds'] + for i,(k,v) in enumerate(self.model['coeffs'].items()): + print( (pretty_num(bounds[0][i]),pretty_num(p0[i]), pretty_num(bounds[1][i])) ) + s+='{:15s}: {:10s} < {:10s} < {:10s}\n'.format(k, pretty_num(bounds[0][i]),pretty_num(p0[i]), pretty_num(bounds[1][i])) + print(s) + + + def __repr__(self): + s='<{} object> with fields:\n'.format(type(self).__name__) + s+=' - data, dictionary with keys: \n' + s+=' - x: [{} ... {}], n: {} \n'.format(self.data['x'][0],self.data['x'][-1],len(self.data['x'])) + s+=' - y: [{} ... 
{}], n: {} \n'.format(self.data['y'][0],self.data['y'][-1],len(self.data['y'])) + s+=' - model, dictionary with keys: \n' + for k,v in self.model.items(): + s=s+' - {:15s}: {}\n'.format(k,v) + return s + + +# --------------------------------------------------------------------------------} +# --- Wrapper for predefined fitters +# --------------------------------------------------------------------------------{ +class PredefinedModelFitter(ModelFitter): + def __init__(self, x=None, y=None, p0=None, bounds=None, **kwargs): + ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) # NOTE: not passing data + + self.kwargs=kwargs + + if x is not None and y is not None: + self.fit_data(x,y,p0,bounds) + + def setup_model(self): + """ + Setup model: + - guess/coeffs_init: return params in format needed for curve_fit (p0,p1,p2,p3) + - bound : bounds in format needed for curve_fit ((low0,low1,low2), (high0, high1)) + - coeffs : OrderedDict, necessary for user print + - formula : necessary for user print + """ + #self.model['coeffs'] = OrderedDict([(var,1) for i,var in enumerate(variables)]) + #self.model['formula'] = '' + #self.model['coeffs_init']=p_guess + #self.model['bounds']=bounds_guess + raise NotImplementedError('To be implemented by child class') + + def model_function(self, x, p): + raise NotImplementedError('To be implemented by child class') + + def fit_data(self, x, y, p0=None, bounds=None): + # Cleaning data + x,y=self.clean_data(x,y) + + # --- setup model + # guess initial parameters, potential bounds, and set necessary data + self.setup_model() + + # --- Minimization + minimize_me = lambda x, *p : self.model_function(x, p) + if self.model['bounds'] is None: + pfit, pcov = so.curve_fit(minimize_me, x, y, p0=self.model['coeffs_init']) + else: + pfit, pcov = so.curve_fit(minimize_me, x, y, p0=self.model['coeffs_init'], bounds=self.model['bounds']) + # --- Reporting information about the fit (after the fit) + # And Return a fitted function + y_fit = self.model_function(x, pfit) + self.model['fitted_function']=lambda xx : self.model_function(xx, pfit) + self.store_fit_info(y_fit, pfit) + + def plot_guess(self, x=None, fig=None, ax=None): + """ plotthe guess values""" + if x is None: + x=self.data['x'] + import matplotlib.pyplot as plt + if fig is None: + fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) + fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) + + p_guess = self.model['coeffs_init'] + + ax.plot(self.data['x'], self.data['y'] , '.', label='Data') + ax.plot(x, self.model_function(x,p_guess), '-', label='Model at guessed parameters') + ax.legend() + + +# --------------------------------------------------------------------------------} +# --- Predefined fitters +# --------------------------------------------------------------------------------{ +class SecondOrderFitterImpulse(PredefinedModelFitter): + + def model_function(self, x, p): + return secondorder_impulse(x, p) + + def setup_model(self): + """ p = (A, omega0, zeta, B, t0) """ + self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('zeta',0.01),('B',0),('t0',0)]) + self.model['formula'] = '{A}*exp(-{zeta}*{omega}*(x-{x0}))*sin({omega}*sqrt(1-{zeta}**2)))+{B}' + + # --- Guess Initial values + x, y = self.data['x'],self.data['y'] + # TODO use signal + dt = x[1]-x[0] + omega0 = main_frequency(x,y) + A = np.max(y) - np.min(y) + B = np.mean(y) + zeta = 0.1 + y_start = y[0]+0.01*A + bDeviate = np.argwhere(abs(y-y_start)>abs(y_start-y[0]))[0] + t0 = 
x[bDeviate[0]] + p_guess = np.array([A, omega0, zeta, B, t0]) + self.model['coeffs_init'] = p_guess + # --- Set Bounds + T = x[-1]-x[0] + dt = x[1]-x[0] + om_min = 2*np.pi/T/2 + om_max = 2*np.pi/dt/2 + b_A = (A*0.1,A*3) + b_om = (om_min,om_max) + b_zeta = (0,1) + b_B = (np.min(y),np.max(y)) + b_x0 = (np.min(x),np.max(x)) + self.model['bounds'] = ((b_A[0],b_om[0],b_zeta[0],b_B[0],b_x0[0]),(b_A[1],b_om[1],b_zeta[1],b_B[1],b_x0[1])) + #self.plot_guess(); import matplotlib.pyplot as plt; plt.show() + #self.print_guessbounds(); + +class SecondOrderFitterStep(PredefinedModelFitter): + + def model_function(self, x, p): + return secondorder_step(x, p) + + def setup_model(self): + """ p = (A, omega0, zeta, B, t0) """ + self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('zeta',0.01),('B',0),('t0',0)]) + self.model['formula'] ='{A}*(1-exp(-{zeta}*{omega}*(x-{x0}))/sqrt(1-{zeta}**2) * cos({omega}*sqrt(1-{zeta}**2)-arctan({zeta}/sqrt(1-{zeta}**2)))) +{B}' + # --- Guess Initial values + x, y = self.data['x'],self.data['y'] + # TODO use signal + omega0 = main_frequency(x,y) + A = np.max(y) - np.min(y) + B = y[0] + zeta = 0.1 + y_start = y[0]+0.01*A + bDeviate = np.argwhere(abs(y-y_start)>abs(y_start-y[0]))[0] + t0 = x[bDeviate[0]] + p_guess = np.array([A, omega0, zeta, B, t0]) + self.model['coeffs_init'] = p_guess + # --- Set Bounds + T = x[-1]-x[0] + dt = x[1]-x[0] + om_min = 2*np.pi/T/2 + om_max = 2*np.pi/dt/2 + b_A = (A*0.1,A*3) + b_om = (om_min,om_max) + b_zeta = (0,1) + b_B = (np.min(y),np.max(y)) + b_x0 = (np.min(x),np.max(x)) + self.model['bounds'] = ((b_A[0],b_om[0],b_zeta[0],b_B[0],b_x0[0]),(b_A[1],b_om[1],b_zeta[1],b_B[1],b_x0[1])) + #self.plot_guess(); import matplotlib.pyplot as plt; plt.show() + #self.print_guessbounds(); + +# --------------------------------------------------------------------------------} +# --- Predefined fitter +# --------------------------------------------------------------------------------{ +class ContinuousPolynomialFitter(ModelFitter): + def __init__(self,order=None, x=None, y=None, p0=None, bounds=None): + ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) + self.setOrder(int(order)) + if order is not None and x is not None and y is not None: + self.fit_data(x,y,p0,bounds) + + def setOrder(self, order): + self.order=order + if order is not None: + variables= string.ascii_lowercase[:order+1] + self.model['coeffs'] = OrderedDict([(var,1) for i,var in enumerate(variables)]) + formula = ' + '.join(['{}*x**{}'.format('{'+var+'}',order-i) for i,var in enumerate(variables)]) + self.model['formula'] = _clean_formula(formula) + + def fit_data(self, x, y, p0=None, bounds=None): + if self.order is None: + raise Exception('Polynomial Fitter not set, call function `setOrder` to set order') + # Cleaning data + x,y=self.clean_data(x,y) + + nParams=self.order+1 + # Bounds + self.setup_bounds(bounds, nParams) # TODO + # Initial conditions + self.setup_guess(p0, bounds, nParams) # TODO + + # Fitting + pfit = np.polyfit(x,y,self.order) + + # --- Reporting information about the fit (after the fit) + y_fit = np.polyval(pfit,x) + self.store_fit_info(y_fit, pfit) + + # --- Return a fitted function + self.model['fitted_function']=lambda xx : np.polyval(pfit,xx) + + +class DiscretePolynomialFitter(ModelFitter): + def __init__(self,exponents=None, x=None, y=None, p0=None, bounds=None): + ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) + self.setExponents(exponents) + if exponents is not None and x is not None and y is not None: + 
self.fit_data(x,y,p0,bounds) + + def setExponents(self, exponents): + self.exponents=exponents + if exponents is not None: + #exponents=-np.sort(-np.asarray(exponents)) + self.exponents=exponents + variables= string.ascii_lowercase[:len(exponents)] + self.model['coeffs'] = OrderedDict([(var,1) for i,var in enumerate(variables)]) + formula = ' + '.join(['{}*x**{}'.format('{'+var+'}',e) for var,e in zip(variables,exponents)]) + self.model['formula'] = _clean_formula(formula) + + def fit_data(self, x, y, p0=None, bounds=None): + if self.exponents is None: + raise Exception('Polynomial Fitter not set, call function `setExponents` to set exponents') + # Cleaning data, and store it in object + x,y=self.clean_data(x,y) + + nParams=len(self.exponents) + # Bounds + self.setup_bounds(bounds, nParams) # TODO + # Initial conditions + self.setup_guess(p0, bounds, nParams) # TODO + + X_poly=np.array([]) + for i,e in enumerate(self.exponents): + if i==0: + X_poly = np.array([x**e]) + else: + X_poly = np.vstack((X_poly,x**e)) + try: + pfit = np.linalg.lstsq(X_poly.T, y, rcond=None)[0] + except: + pfit = np.linalg.lstsq(X_poly.T, y) + + # --- Reporting information about the fit (after the fit) + y_fit= np.dot(pfit, X_poly) + self.store_fit_info(y_fit, pfit) + + # --- Return a fitted function + def fitted_function(xx): + y=np.zeros(xx.shape) + for i,(e,c) in enumerate(zip(self.exponents,pfit)): + y += c*x**e + return y + self.model['fitted_function']=fitted_function + + +class SinusoidFitter(ModelFitter): + def __init__(self, physical=False, x=None, y=None, p0=None, bounds=None): + ModelFitter.__init__(self, x=None, y=None, p0=p0, bounds=bounds) + #self.setOrder(int(order)) + self.physical=physical + if physical: + self.model['coeffs'] = OrderedDict([('A',1),('f',1),('phi',0),('B',0)]) + self.model['formula'] = '{A} * sin(2*pi*({f}*x + {phi}/360)) + {B}' + else: + self.model['coeffs'] = OrderedDict([('A',1),('omega',1),('phi',0),('B',0)]) + self.model['formula'] = '{A} * sin({omega}*x + {phi}) + {B}' + + if x is not None and y is not None: + self.fit_data(x,y,p0,bounds) + + def fit_data(self, x, y, p0=None, bounds=None): + # Cleaning data + x,y=self.clean_data(x,y) + + # TODO use signal + guess_freq= main_frequency(x,y)/(2*np.pi) # [Hz] + guess_amp = np.std(y) * 2.**0.5 + guess_offset = np.mean(y) + if self.physical: + guess = np.array([guess_amp, guess_freq, 0., guess_offset]) + minimize_me = lambda x, *p : sinusoid_f(x, p) + else: + guess = np.array([guess_amp, 2.*np.pi*guess_freq, 0., guess_offset]) + minimize_me = lambda x, *p : sinusoid(x, p) + self.model['coeffs_init'] = guess + + pfit, pcov = so.curve_fit(minimize_me, x, y, p0=guess) + + # --- Reporting information about the fit (after the fit) + # And Return a fitted function + if self.physical: + y_fit = sinusoid_f(x, pfit) + self.model['fitted_function']=lambda xx : sinusoid_f(xx, pfit) + else: + y_fit = sinusoid(x, pfit) + self.model['fitted_function']=lambda xx : sinusoid(xx, pfit) + self.store_fit_info(y_fit, pfit) + + + +class GeneratorTorqueFitter(ModelFitter): + def __init__(self,x=None, y=None, p0=None, bounds=None): + ModelFitter.__init__(self,x=None, y=None, p0=p0, bounds=bounds) + +# RtGnSp, RtTq , Rgn2K , SlPc , SpdGenOn = p +# {'label':'Generator Torque', 'handle': gentorque, 'id':'predef: gentorque', +# 'formula': '{RtGnSp} , {RtTq} , {Rgn2K} , {SlPc} , {SpdGenOn}', + self.model['coeffs']= extract_key_num('RtGnSp=100 , RtTq=1000 , Rgn2K=0.01 ,SlPc=5 , SpdGenOn=0') +# 'consts' :None, +# 'bounds' :'RtGnSp=(0.1,inf) , RtTq=(1,inf), 
Rgn2K=(0.0,0.1) ,SlPc=(0,20) , SpdGenOn=(0,inf)'} + if x is not None and y is not None: + self.fit_data(x,y,p0,bounds) + + def fit_data(self, x, y, p0=None, bounds=None): + #nParams=5 + ## Bounds + #self.setup_bounds(bounds,nParams) # TODO + ## Initial conditions + #self.setup_guess(p0,bounds,nParams) # TODO + + # Cleaning data, and store it in object + x,y=self.clean_data(x,y) + + I = np.argsort(x) + x=x[I] + y=y[I] + + # Estimating deltas + xMin, xMax=np.min(x),np.max(x) + yMin, yMax=np.min(y),np.max(y) + DeltaX = (xMax-xMin)*0.02 + DeltaY = (yMax-yMin)*0.02 + + # Binning data + x_bin=np.linspace(xMin,xMax,min(200,len(x))) + x_lin=x_bin[0:-1]+np.diff(x_bin) + #y_lin=np.interp(x_lin,x,y) # TODO replace by bining + y_lin = np.histogram(y, x_bin, weights=y)[0]/ np.histogram(y, x_bin)[0] + y_lin, _, _ = stats.binned_statistic(x, y, statistic='mean', bins=x_bin) + x_lin, _, _ = stats.binned_statistic(x, x, statistic='mean', bins=x_bin) + bNaN=~np.isnan(y_lin) + y_lin=y_lin[bNaN] + x_lin=x_lin[bNaN] + + # --- Find good guess of parameters based on data + # SpdGenOn + iOn = np.where(y>0)[0][0] + SpdGenOn_0 = x[iOn] + SpdGenOn_Bnds = (max(x[iOn]-DeltaX,xMin), min(x[iOn]+DeltaX,xMax)) + # Slpc + Slpc_0 = 5 + Slpc_Bnds = (0,10) + # RtTq + RtTq_0 = yMax + RtTq_Bnds = (yMax-DeltaY, yMax+DeltaY) + # RtGnSp + iCloseRt = np.where(y>yMax*0.50)[0][0] + RtGnSp_0 = x[iCloseRt] + RtGnSp_Bnds = ( RtGnSp_0 -DeltaX*2, RtGnSp_0+DeltaX*2) + # Rgn2K + #print('>>>',SpdGenOn_0, RtGnSp_0) + bR2=np.logical_and(x>SpdGenOn_0, x ['a','b'] + The variables are replaced with p[0],..,p[n] in order of appearance + """ + regex = r"\{(.*?)\}" + matches = re.finditer(regex, sFormula, re.DOTALL) + formula_eval=sFormula + variables=[] + ivar=0 + for i, match in enumerate(matches): + for groupNum in range(0, len(match.groups())): + var = match.group(1) + if var not in variables: + variables.append(var) + formula_eval = formula_eval.replace('{'+match.group(1)+'}','p[{:d}]'.format(ivar)) + ivar+=1 + return variables, formula_eval + + +def extract_key_tuples(text): + """ + all=(0.1,-2),b=(inf,0), c=(-inf,0.3e+10) + """ + if text is None: + return {} + regex = re.compile(r'(?P[\w\-]+)=\((?P[0-9+epinf.-]*?),(?P[0-9+epinf.-]*?)\)($|,)') + return {match.group("key"): (float(match.group("value1")),float(match.group("value2"))) for match in regex.finditer(text.replace(' ',''))} + +def extract_key_num(text): + """ + all=0.1, b=inf, c=-0.3e+10 + """ + if text is None: + return {} + regex = re.compile(r'(?P[\w\-]+)=(?P[0-9+epinf.-]*?)($|,)') + return OrderedDict([(match.group("key"), float(match.group("value"))) for match in regex.finditer(text.replace(' ',''))]) + +def extract_key_miscnum(text): + """ + all=0.1, b=(inf,0), c=[-inf,0.3e+10,10,11]) + """ + def isint(s): + try: + int(s) + return True + except: + return False + + if text is None: + return {} + sp=re.compile('([\w]+)=').split(text.replace(' ','')) + if len(sp)<3: + return {} + sp=sp[1:] + keys = sp[0::2] + values = sp[1::2] + d={} + for (k,v) in zip(keys,values): + if v.find('(')>=0: + v=v.replace('(','').replace(')','') + v=v.split(',') + vect=tuple([float(val) for val in v if len(val.strip())>0]) + elif v.find('[')>=0: + v=v.replace('[','').replace(']','') + v=v.split(',') + vect=[int(val) if isint(val) else float(val) for val in v if len(val.strip())>0] # NOTE returning lists + elif v.find('True')>=0: + v=v.replace(',','').strip() + vect=True + elif v.find('False')>=0: + v=v.replace(',','').strip() + vect=False + else: + v=v.replace(',','').strip() + vect=int(v) if isint(v) 
else float(v) + d[k]=vect + return d + +def set_common_keys(dict_target, dict_source): + """ Set a dictionary using another one, missing keys in source dictionary are reported""" + keys_missing=[] + for k in dict_target.keys(): + if k in dict_source.keys(): + dict_target[k]=dict_source[k] + else: + keys_missing.append(k) + return dict_target, keys_missing + +def _clean_formula(s, latex=False): + s = s.replace('+-','-').replace('**1','').replace('*x**0','') + s = s.replace('np.','') + if latex: + #s = s.replace('{','$').replace('}','$') + s = s.replace('phi',r'\phi') + s = s.replace('alpha',r'\alpha') + s = s.replace('beta' ,r'\alpha') + s = s.replace('zeta' ,r'\zeta') + s = s.replace('mu' ,r'\mu' ) + s = s.replace('pi' ,r'\pi' ) + s = s.replace('sigma',r'\sigma') + s = s.replace('omega',r'\omega') + s = s.replace('_ref',r'_{ref}') # make this general + s = s.replace(r'(',r'{(') + s = s.replace(r')',r')}') + s = s.replace(r'**',r'^') + s = s.replace(r'*', '') + s = s.replace('sin',r'\sin') + s = s.replace('exp',r'\exp') + s = s.replace('sqrt',r'\sqrt') + s = r'$'+s+r'$' + else: + s = s.replace('{','').replace('}','') + return s + + +def main_frequency(t,y): + """ + Returns main frequency of a signal + NOTE: this tool below to welib.tools.signal, but put here for convenience + """ + dt = t[1]-t[0] # assume uniform spacing of time and frequency + om = np.fft.fftfreq(len(t), (dt))*2*np.pi + Fyy = abs(np.fft.fft(y)) + omega = abs(om[np.argmax(Fyy[1:])+1]) # exclude the zero frequency (mean) + return omega + +def rsquare(y, f): + """ Compute coefficient of determination of data fit model and RMSE + [r2] = rsquare(y,f) + RSQUARE computes the coefficient of determination (R-square) value from + actual data Y and model data F. + INPUTS + y : Actual data + f : Model fit + OUTPUT + R2 : Coefficient of determination + """ + # Compare inputs + if not np.all(y.shape == f.shape) : + raise Exception('Y and F must be the same size') + # Check for NaN + tmp = np.logical_not(np.logical_or(np.isnan(y),np.isnan(f))) + y = y[tmp] + f = f[tmp] + R2 = max(0,1-np.sum((y-f)**2)/np.sum((y-np.mean(y))** 2)) + return R2 + +def pretty_param(s): + if s in ['alpha','beta','delta','gamma','epsilon','zeta','lambda','mu','nu','pi','rho','sigma','phi','psi','omega']: + s = r'\{}'.format(s) + s = s.replace('_ref',r'_{ref}') # make this general.. 
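+    # Illustrative examples (sketch, based on the replacements above):
+    #   pretty_param('omega') -> '\omega'
+    #   pretty_param('u_ref') -> 'u_{ref}'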
+ return s + +def pretty_num(x): + if abs(x)<1000 and abs(x)>1e-4: + return "{:9.4f}".format(x) + else: + return '{:.3e}'.format(x) + +def pretty_num_short(x,digits=3): + if digits==4: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.4f}".format(x) + else: + return "{:.4e}".format(x) + elif digits==3: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.3f}".format(x) + else: + return "{:.3e}".format(x) + elif digits==2: + if abs(x)<1000 and abs(x)>1e-1: + return "{:.2f}".format(x) + else: + return "{:.2e}".format(x) + + +if __name__ == '__main__': + # --- Writing example models to file for pyDatView tests + a,b,c = 2.0, 3.0, 4.0 + u_ref,z_ref,alpha=10,12,0.12 + mu,sigma=0.5,1.2 + x = np.linspace(0.1,30,20) + A,k,B=0.5,1.2,10 + y_exp=expdecay(x,(A,k,B)) + A, k = 10, 2.3, + y_weib=weibull_pdf(x,(A,k)) + y_log=logarithmic(x,(a,b)) + exponents=[0,3,5] + y_poly = a + b*x**3 + c*x**5 + y_power=powerlaw_all(x,(alpha,u_ref,z_ref)) + y_gauss=gaussian(x,(mu,sigma)) + A= 101; B= -200.5; omega = 0.4; phi = np.pi/3 + y_sin=sinusoid(x,(A,omega,phi,B)) + np.random.normal(0, 0.1, len(x)) + M=np.column_stack((x,y_poly,y_power,y_gauss,y_gauss+10,y_weib,y_exp,y_log,y_sin)) + np.savetxt('../TestFit.csv',M,header='x,poly,power,gauss,gauss_off,weib,expdecay,log,sin',delimiter=',') diff --git a/pydatview/tools/signal.py b/pydatview/tools/signal.py index e00c3ca..be7011f 100644 --- a/pydatview/tools/signal.py +++ b/pydatview/tools/signal.py @@ -1,586 +1,657 @@ -from __future__ import division -import numpy as np -from numpy.random import rand -import pandas as pd - - -# --- List of available filters -FILTERS=[ - {'name':'Moving average','param':100,'paramName':'Window Size','paramRange':[0,100000],'increment':1}, - {'name':'Low pass 1st order','param':1.0,'paramName':'Cutoff Freq.','paramRange':[0.0001,100000],'increment':0.1}, - {'name':'High pass 1st order','param':1.0,'paramName':'Cutoff Freq.','paramRange':[0.0001,100000],'increment':0.1}, -] - -SAMPLERS=[ - {'name':'Replace', 'param':[], 'paramName':'New x'}, - {'name':'Insert', 'param':[], 'paramName':'Insert list'}, - {'name':'Remove', 'param':[], 'paramName':'Remove list'}, - {'name':'Every n', 'param':2 , 'paramName':'n'}, - {'name':'Time-based', 'param':0.01 , 'paramName':'Sample time (s)'}, - {'name':'Delta x', 'param':0.1, 'paramName':'dx'}, -] - - - -def reject_outliers(y, x=None, m = 2., replaceNaN=True): - """ Reject outliers: - If replaceNaN is true: they are replaced by NaN - Otherwise they are removed - """ - if m==0: - # No rejection... 
- pass - else: - dd = np.abs(y - np.nanmedian(y)) - mdev = np.nanmedian(dd) - if mdev: - ss = dd/mdev - b=ss=0, j< len(xp)-1) - bLower =j<0 - bUpper =j>=len(xp)-1 - jOK = j[bOK] - #import pdb; pdb.set_trace() - dd[bOK] = (x[bOK] - xp[jOK]) / (xp[jOK + 1] - xp[jOK]) - jBef=j - jAft=j+1 - # - # Use first and last values for anything beyond xp - jAft[bUpper] = len(xp)-1 - jBef[bUpper] = len(xp)-1 - jAft[bLower] = 0 - jBef[bLower] = 0 - if extrap=='bounded': - pass - # OK - elif extrap=='nan': - dd[~bOK] = np.nan - else: - raise NotImplementedError() - - return (1 - dd) * fp[:,jBef] + fp[:,jAft] * dd - -def resample_interp(x_old, x_new, y_old=None, df_old=None): - #x_new=np.sort(x_new) - if df_old is not None: - # --- Method 1 (pandas) - #df_new = df_old.copy() - #df_new = df_new.set_index(x_old) - #df_new = df_new.reindex(df_new.index | x_new) - #df_new = df_new.interpolate().loc[x_new] - #df_new = df_new.reset_index() - # --- Method 2 interp storing dx - data_new=multiInterp(x_new, x_old, df_old.values.T) - df_new = pd.DataFrame(data=data_new.T, columns=df_old.columns.values) - return x_new, df_new - - if y_old is not None: - return x_new, np.interp(x_new, x_old, y_old) - - -def applySamplerDF(df_old, x_col, sampDict): - x_old=df_old[x_col].values - x_new, df_new =applySampler(x_old, y_old=None, sampDict=sampDict, df_old=df_old) - df_new[x_col]=x_new - return df_new - - -def applySampler(x_old, y_old, sampDict, df_old=None): - - param = np.asarray(sampDict['param']).ravel() - - if sampDict['name']=='Replace': - if len(param)==0: - raise Exception('Error: At least one value is required to resample the x values with') - x_new = param - return resample_interp(x_old, x_new, y_old, df_old) - - elif sampDict['name']=='Insert': - if len(param)==0: - raise Exception('Error: provide a list of values to insert') - x_new = np.sort(np.concatenate((x_old.ravel(),param))) - return resample_interp(x_old, x_new, y_old, df_old) - - elif sampDict['name']=='Remove': - I=[] - if len(param)==0: - raise Exception('Error: provide a list of values to remove') - for d in param: - Ifound= np.where(np.abs(x_old-d)<1e-3)[0] - if len(Ifound)>0: - I+=list(Ifound.ravel()) - x_new=np.delete(x_old,I) - return resample_interp(x_old, x_new, y_old, df_old) - - elif sampDict['name']=='Delta x': - if len(param)==0: - raise Exception('Error: provide value for dx') - dx = param[0] - x_new = np.arange(x_old[0], x_old[-1]+dx/2, dx) - return resample_interp(x_old, x_new, y_old, df_old) - - elif sampDict['name']=='Every n': - if len(param)==0: - raise Exception('Error: provide value for n') - n = int(param[0]) - if n==0: - raise Exception('Error: |n| should be at least 1') - - x_new=x_old[::n] - if df_old is not None: - return x_new, (df_old.copy()).iloc[::n,:] - if y_old is not None: - return x_new, y_old[::n] - - elif sampDict['name'] == 'Time-based': - if len(param) == 0: - raise Exception('Error: provide value for new sampling time') - sample_time = float(param[0]) - if sample_time <= 0: - raise Exception('Error: sample time must be positive') - - time_index = pd.TimedeltaIndex(x_old, unit="S") - x_new = pd.Series(x_old, index=time_index).resample("{:f}S".format(sample_time)).mean().interpolate().values - - if df_old is not None: - df_new = df_old.set_index(time_index, inplace=False).resample("{:f}S".format(sample_time)).mean() - df_new = df_new.interpolate().reset_index(drop=True) - return x_new, df_new - if y_old is not None: - y_new = pd.Series(y_old, index=time_index).resample("{:f}S".format(sample_time)).mean() - y_new = 
y_new.interpolate().values - return x_new, y_new - - else: - raise NotImplementedError('{}'.format(sampDict)) - pass - -# --------------------------------------------------------------------------------} -# --- Filters -# --------------------------------------------------------------------------------{ -# def moving_average(x, w): -# #t_new = np.arange(0,Tmax,dt) -# #nt = len(t_new) -# #nw=400 -# #u_new = moving_average(np.floor(np.linspace(0,3,nt+nw-1))*3+3.5, nw) -# return np.convolve(x, np.ones(w), 'valid') / w -# def moving_average(x,N,mode='same'): -# y=np.convolve(x, np.ones((N,))/N, mode=mode) -# return y -def moving_average(a, n=3) : - """ - perform moving average, return a vector of same length as input - - NOTE: also in kalman.filters - """ - a = a.ravel() - a = np.concatenate(([a[0]]*(n-1),a)) # repeating first values - ret = np.cumsum(a, dtype = float) - ret[n:] = ret[n:] - ret[:-n] - ret=ret[n - 1:] / n - return ret - -def lowpass1(y, dt, fc=3) : - """ - 1st order low pass filter - """ - tau=1/(2*np.pi*fc) - alpha=dt/(tau+dt) - y_filt=np.zeros(y.shape) - y_filt[0]=y[0] - for i in np.arange(1,len(y)): - y_filt[i]=alpha*y[i] + (1-alpha)*y_filt[i-1] - return y_filt - -def highpass1(y, dt, fc=3) : - """ - 1st order high pass filter - """ - tau=1/(2*np.pi*fc) - alpha=tau/(tau+dt) - y_filt=np.zeros(y.shape) - y_filt[0]=0 - for i in np.arange(1,len(y)): - y_filt[i]=alpha*y_filt[i-1] + alpha*(y[i]-y[i-1]) - m0=np.mean(y) - m1=np.mean(y_filt) - y_filt+=m0-m1 - return y_filt - - -def applyFilter(x, y,filtDict): - if filtDict['name']=='Moving average': - return moving_average(y, n=np.round(filtDict['param']).astype(int)) - elif filtDict['name']=='Low pass 1st order': - dt = x[1]-x[0] - return lowpass1(y, dt=dt, fc=filtDict['param']) - elif filtDict['name']=='High pass 1st order': - dt = x[1]-x[0] - return highpass1(y, dt=dt, fc=filtDict['param']) - else: - raise NotImplementedError('{}'.format(filtDict)) - -# --------------------------------------------------------------------------------} -# --- -# --------------------------------------------------------------------------------{ -def zero_crossings(y,x=None,direction=None): - """ - Find zero-crossing points in a discrete vector, using linear interpolation. 
- - direction: 'up' or 'down', to select only up-crossings or down-crossings - - returns: - x values xzc such that y(yzc)==0 - indexes izc, such that the zero is between y[izc] (excluded) and y[izc+1] (included) - - if direction is not provided, also returns: - sign, equal to 1 for up crossing - """ - if x is None: - x=np.arange(len(y)) - - if np.any((x[1:] - x[0:-1]) <= 0.0): - raise Exception('x values need to be in ascending order') - - # Indices before zero-crossing - iBef = np.where(y[1:]*y[0:-1] < 0.0)[0] - - # Find the zero crossing by linear interpolation - xzc = x[iBef] - y[iBef] * (x[iBef+1] - x[iBef]) / (y[iBef+1] - y[iBef]) - - # Selecting points that are exactly 0 and where neighbor change sign - iZero = np.where(y == 0.0)[0] - iZero = iZero[np.where((iZero > 0) & (iZero < x.size-1))] - iZero = iZero[np.where(y[iZero-1]*y[iZero+1] < 0.0)] - - # Concatenate - xzc = np.concatenate((xzc, x[iZero])) - iBef = np.concatenate((iBef, iZero)) - - # Sort - iSort = np.argsort(xzc) - xzc, iBef = xzc[iSort], iBef[iSort] - - # Return up-crossing, down crossing or both - sign = np.sign(y[iBef+1]-y[iBef]) - if direction == 'up': - I= np.where(sign==1)[0] - return xzc[I],iBef[I] - elif direction == 'down': - I= np.where(sign==-1)[0] - return xzc[I],iBef[I] - elif direction is not None: - raise Exception('Direction should be either `up` or `down`') - return xzc, iBef, sign - - -# --------------------------------------------------------------------------------} -# --- Correlation -# --------------------------------------------------------------------------------{ -def correlation(x, nMax=80, dt=1, method='manual'): - """ - Compute auto correlation of a signal - """ - nvec = np.arange(0,nMax) - sigma2 = np.var(x) - R = np.zeros(nMax) - R[0] =1 - for i,nDelay in enumerate(nvec[1:]): - R[i+1] = np.mean( x[0:-nDelay] * x[nDelay:] ) / sigma2 - - tau = nvec*dt - return R, tau - - -def correlated_signal(coeff, n=1000, seed=None): - """ - Create a correlated random signal of length `n` based on the correlation coefficient `coeff` - value[t] = coeff * value[t-1] + (1-coeff) * random - """ - if coeff<0 or coeff>1: - raise Exception('Correlation coefficient should be between 0 and 1') - if seed is not None: - np.random.seed(seed) - - x = np.zeros(n) - rvec = rand(n) - x[0] = rvec[0] - for m in np.arange(1,n): - x[m] = coeff*x[m-1] + (1-coeff)*rvec[m] - x-=np.mean(x) - return x - - -def find_time_offset(t, f, g, outputAll=False): - """ - Find time offset between two signals (may be negative) - - t_offset = find_time_offset(t, f, g) - f(t+t_offset) ~= g(t) - - """ - import scipy - from scipy.signal import correlate - # Remove mean and normalize by std - f = f.copy() - g = g.copy() - f -= f.mean() - g -= g.mean() - f /= f.std() - g /= g.std() - - # Find cross-correlation - xcorr = correlate(f, g) - - # Lags - n = len(f) - dt = t[1]-t[0] - lag = np.arange(1-n, n)*dt - - # Time offset is located at maximum correlation - t_offset = lag[xcorr.argmax()] - - if outputAll: - return t_offset, lag, xcorr - else: - return t_offset - -def sine_approx(t, x, method='least_square'): - """ - Sinusoidal approximation of input signal x - """ - if method=='least_square': - from welib.tools.curve_fitting import fit_sinusoid - y_fit, pfit, fitter = fit_sinusoid(t, x) - omega = fitter.model['coeffs']['omega'] - A = fitter.model['coeffs']['A'] - phi = fitter.model['coeffs']['phi'] - x2 = y_fit - else: - raise NotImplementedError() - - - return x2, omega, A, phi - - -# 
--------------------------------------------------------------------------------} -# --- Convolution -# --------------------------------------------------------------------------------{ -def convolution_integral(time, f, g): - """ - Compute convolution integral: - f * g = \int 0^t f(tau) g(t-tau) dtau = g * f - For now, only works for uniform time vector, an exception is raised otherwise - """ - dt = time[1]-time[0] - if len(np.unique(np.around(np.diff(time)/dt,3)))>1: - raise Exception('Convolution integral implemented for uniform time vector') - - return np.convolve(f.ravel(), g.ravel() )[:len(time)]*dt - - -# --------------------------------------------------------------------------------} -# --- Intervals/peaks -# --------------------------------------------------------------------------------{ -def intervals(b, min_length=1, forgivingJump=True, removeSmallRel=True, removeSmallFact=0.1, mergeCloseRel=False, mergeCloseFact=0.2): - """ - Describe intervals from a boolean vector where intervals are indicated by True - - INPUT: - - b : a logical vector, where 1 means, I'm in an interval. - - min_length: if provided, do not return intervals of length < min_length - - forgivingJump: if true, merge intervals that are separated by a distance < min_length - - removeSmallRel: remove intervals that have a small length compared to the max length of intervals - - removeSmallFact: factor used for removeSmallRel - - mergeCloseRel: merge intervals that are closer than a fraction of the typical distance between intervals - - OUTPUTS: - - IStart : ending indices - - IEnd : ending indices - - Length: interval lenghts (IEnd-IStart+1) - - IStart, IEnd, Lengths = intervals([False, True, True, False, True, True, True, False]) - np.testing.assert_equal(IStart , np.array([1,4])) - np.testing.assert_equal(IEnd , np.array([2,6])) - np.testing.assert_equal(Lengths, np.array([2,3])) - """ - b = np.asarray(b) - total = np.sum(b) - - min_length=max(min_length,1) - if forgivingJump: - min_jump=min_length - else: - min_jump=1 - - if total==0: - IStart = np.array([]) - IEnd = np.array([]) - Lengths= np.array([]) - return IStart, IEnd, Lengths - elif total==1: - i = np.where(b)[0][0] - IStart = np.array([i]) - IEnd = np.array([i]) - Lengths= np.array([1]) - else: - n = len(b) - Idx = np.arange(n)[b] - delta_Idx=np.diff(Idx) - jumps =np.where(delta_Idx>min_jump)[0] - if len(jumps)==0: - IStart = np.array([Idx[0]]) - IEnd = np.array([Idx[-1]]) - else: - istart=Idx[0] - jumps=np.concatenate(([-1],jumps,[len(Idx)-1])) - IStart = Idx[jumps[:-1]+1] # intervals start right after a jump - IEnd = Idx[jumps[1:]] # intervals stops at jump - Lengths = IEnd-IStart+1 - - # Removing intervals smaller than min_length - bKeep = Lengths>=min_length - IStart = IStart[bKeep] - IEnd = IEnd[bKeep] - Lengths = Lengths[bKeep] - # Removing intervals smaller than less than a fraction of the max interval - if removeSmallRel: - bKeep = Lengths>=removeSmallFact*np.max(Lengths) - IStart = IStart[bKeep] - IEnd = IEnd[bKeep] - Lengths = Lengths[bKeep] - - # Distances between intervals - if mergeCloseRel: - if len(IStart)<=2: - pass - else: - D = IStart[1:]-IEnd[0:-1] - #print('D',D,np.max(D),int(np.max(D) * mergeCloseFact)) - min_length = max(int(np.max(D) * mergeCloseFact), min_length) - if min_length<=1: - pass - else: - #print('Readjusting min_length to {} to accomodate for max interval spacing of {:.0f}'.format(min_length, np.mean(D))) - return intervals(b, min_length=min_length, forgivingJump=True, removeSmallRel=removeSmallRel, 
removeSmallFact=removeSmallFact, mergeCloseRel=False) - return IStart, IEnd, Lengths - -def peaks(x, threshold=0.3, threshold_abs=True, method='intervals', min_length=3, - mergeCloseRel=True, returnIntervals=False): - """ - Find peaks in a signal, above a given threshold - INPUTS: - - x : 1d-array, signal - - threshold : scalar, absolute or relative threshold beyond which peaks are looked for - relative threshold are proportion of the max-min of the signal (between 0-1) - - threshold_abs : boolean, specify whether the threshold is absolute or relative - - method : string, selects which method is used to find the peaks, between: - - 'interval' : one peak per interval above the threshold - - 'derivative': uses derivative to find maxima, may return more than one per interval - - min_length: - - if 'interval' method is used: minimum interval - - if 'derivative' method is used: minimum distance between two peaks - - OPTIONS for interval method: - - mergeCloseRel: logical, if True, attempts to merge intervals that are close to each other compare to the typical interval spacing - set to False if all peaks are wanted - - returnIntervals: logical, if true, return intervals used for interval method - OUTPUTS: - - I : index of the peaks - -[IStart, IEnd] if return intervals is true, see function `intervals` - """ - if not threshold_abs: - threshold = threshold * (np.max(y) - np.min(y)) + np.min(y) - - if method =='intervals': - IStart, IEnd, Lengths = intervals(x>threshold, min_length=min_length, mergeCloseRel=mergeCloseRel) - I = np.array([iS if L==1 else np.argmax(x[iS:iE+1])+iS for iS,iE,L in zip(IStart,IEnd,Lengths)]) - if returnIntervals: - return I, IStart, IEnd - else: - return I - - elif method =='derivative': - I = indexes(x, thres=threshold, thres_abs=True, min_dist=min_length) - return I - else: - raise NotImplementedError('Method {}'.format(method)) - - - -# --------------------------------------------------------------------------------} -# --- Simple signals -# --------------------------------------------------------------------------------{ -def step(time, tStep=0, valueAtStep=0, amplitude=1): - """ - returns a step function: - 0 if ttStep - valueAtStep if t==tStep - """ - return np.heaviside(time-tStep, valueAtStep)*amplitude - - - -if __name__=='__main__': - import numpy as np - import matplotlib.pyplot as plt - - # Input - dt = 1 - n = 10000 - coeff = 0.95 # 1:full corr, 00-corr - nMax = 180 - # Create a correlated time series - tvec = np.arange(0,n)*dt - ts = correlated_signal(coeff, n) - # --- Compute correlation coefficient - R, tau = correlation(x, nMax=nMax) - fig,axes = plt.subplots(2, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) - fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) - ax=axes[0] - # Plot time series - ax.plot(tvec,ts) - ax.set_xlabel('t [s]') - ax.set_ylabel('u [m/s]') - ax.tick_params(direction='in') - # Plot correlation - ax=axes[1] - ax.plot(tau, R ,'b-o', label='computed') - ax.plot(tau, coeff**(tau/dt) , 'r--' ,label='coeff^{tau/dt}') # analytical coeff^n trend - ax.set_xlabel(r'$\tau$ [s]') - ax.set_ylabel(r'$R(\tau)$ [-]') - ax.legend() - plt.show() - - - - - - +from __future__ import division +import numpy as np +from numpy.random import rand +import pandas as pd + + +# --- List of available filters +FILTERS=[ + {'name':'Moving average','param':100,'paramName':'Window Size','paramRange':[0,100000],'increment':1}, + {'name':'Low pass 1st order','param':1.0,'paramName':'Cutoff 
Freq.','paramRange':[0.0001,100000],'increment':0.1}, + {'name':'High pass 1st order','param':1.0,'paramName':'Cutoff Freq.','paramRange':[0.0001,100000],'increment':0.1}, +] + +SAMPLERS=[ + {'name':'Replace', 'param':[], 'paramName':'New x'}, + {'name':'Insert', 'param':[], 'paramName':'Insert list'}, + {'name':'Remove', 'param':[], 'paramName':'Remove list'}, + {'name':'Every n', 'param':2 , 'paramName':'n'}, + {'name':'Time-based', 'param':0.01 , 'paramName':'Sample time (s)'}, + {'name':'Delta x', 'param':0.1, 'paramName':'dx'}, +] + + + +def reject_outliers(y, x=None, m = 2., replaceNaN=True): + """ Reject outliers: + If replaceNaN is true: they are replaced by NaN + Otherwise they are removed + """ + if m==0: + # No rejection... + pass + else: + dd = np.abs(y - np.nanmedian(y)) + mdev = np.nanmedian(dd) + if mdev: + ss = dd/mdev + b=ss=0, j< len(xp)-1) + bLower =j<0 + bUpper =j>=len(xp)-1 + jOK = j[bOK] + #import pdb; pdb.set_trace() + dd[bOK] = (x[bOK] - xp[jOK]) / (xp[jOK + 1] - xp[jOK]) + jBef=j + jAft=j+1 + # + # Use first and last values for anything beyond xp + jAft[bUpper] = len(xp)-1 + jBef[bUpper] = len(xp)-1 + jAft[bLower] = 0 + jBef[bLower] = 0 + if extrap=='bounded': + pass + # OK + elif extrap=='nan': + dd[~bOK] = np.nan + else: + raise NotImplementedError() + + return (1 - dd) * fp[:,jBef] + fp[:,jAft] * dd + +def resample_interp(x_old, x_new, y_old=None, df_old=None): + #x_new=np.sort(x_new) + if df_old is not None: + # --- Method 1 (pandas) + #df_new = df_old.copy() + #df_new = df_new.set_index(x_old) + #df_new = df_new.reindex(df_new.index | x_new) + #df_new = df_new.interpolate().loc[x_new] + #df_new = df_new.reset_index() + # --- Method 2 interp storing dx + data_new=multiInterp(x_new, x_old, df_old.values.T) + df_new = pd.DataFrame(data=data_new.T, columns=df_old.columns.values) + return x_new, df_new + + if y_old is not None: + return x_new, np.interp(x_new, x_old, y_old) + + +def applySamplerDF(df_old, x_col, sampDict): + x_old=df_old[x_col].values + x_new, df_new =applySampler(x_old, y_old=None, sampDict=sampDict, df_old=df_old) + df_new[x_col]=x_new + return df_new + + +def applySampler(x_old, y_old, sampDict, df_old=None): + + param = np.asarray(sampDict['param']).ravel() + + if sampDict['name']=='Replace': + if len(param)==0: + raise Exception('Error: At least one value is required to resample the x values with') + x_new = param + return resample_interp(x_old, x_new, y_old, df_old) + + elif sampDict['name']=='Insert': + if len(param)==0: + raise Exception('Error: provide a list of values to insert') + x_new = np.sort(np.concatenate((x_old.ravel(),param))) + return resample_interp(x_old, x_new, y_old, df_old) + + elif sampDict['name']=='Remove': + I=[] + if len(param)==0: + raise Exception('Error: provide a list of values to remove') + for d in param: + Ifound= np.where(np.abs(x_old-d)<1e-3)[0] + if len(Ifound)>0: + I+=list(Ifound.ravel()) + x_new=np.delete(x_old,I) + return resample_interp(x_old, x_new, y_old, df_old) + + elif sampDict['name']=='Delta x': + if len(param)==0: + raise Exception('Error: provide value for dx') + dx = param[0] + x_new = np.arange(x_old[0], x_old[-1]+dx/2, dx) + return resample_interp(x_old, x_new, y_old, df_old) + + elif sampDict['name']=='Every n': + if len(param)==0: + raise Exception('Error: provide value for n') + n = int(param[0]) + if n==0: + raise Exception('Error: |n| should be at least 1') + + x_new=x_old[::n] + if df_old is not None: + return x_new, (df_old.copy()).iloc[::n,:] + if y_old is not None: + return x_new, 
y_old[::n] + + elif sampDict['name'] == 'Time-based': + if len(param) == 0: + raise Exception('Error: provide value for new sampling time') + sample_time = float(param[0]) + if sample_time <= 0: + raise Exception('Error: sample time must be positive') + + time_index = pd.TimedeltaIndex(x_old, unit="S") + x_new = pd.Series(x_old, index=time_index).resample("{:f}S".format(sample_time)).mean().interpolate().values + + if df_old is not None: + df_new = df_old.set_index(time_index, inplace=False).resample("{:f}S".format(sample_time)).mean() + df_new = df_new.interpolate().reset_index(drop=True) + return x_new, df_new + if y_old is not None: + y_new = pd.Series(y_old, index=time_index).resample("{:f}S".format(sample_time)).mean() + y_new = y_new.interpolate().values + return x_new, y_new + + else: + raise NotImplementedError('{}'.format(sampDict)) + pass + +# --------------------------------------------------------------------------------} +# --- Filters +# --------------------------------------------------------------------------------{ +# def moving_average(x, w): +# #t_new = np.arange(0,Tmax,dt) +# #nt = len(t_new) +# #nw=400 +# #u_new = moving_average(np.floor(np.linspace(0,3,nt+nw-1))*3+3.5, nw) +# return np.convolve(x, np.ones(w), 'valid') / w +# def moving_average(x,N,mode='same'): +# y=np.convolve(x, np.ones((N,))/N, mode=mode) +# return y +def moving_average(a, n=3) : + """ + perform moving average, return a vector of same length as input + + NOTE: also in kalman.filters + """ + a = a.ravel() + a = np.concatenate(([a[0]]*(n-1),a)) # repeating first values + ret = np.cumsum(a, dtype = float) + ret[n:] = ret[n:] - ret[:-n] + ret=ret[n - 1:] / n + return ret + +def lowpass1(y, dt, fc=3) : + """ + 1st order low pass filter + """ + tau=1/(2*np.pi*fc) + alpha=dt/(tau+dt) + y_filt=np.zeros(y.shape) + y_filt[0]=y[0] + for i in np.arange(1,len(y)): + y_filt[i]=alpha*y[i] + (1-alpha)*y_filt[i-1] + return y_filt + +def highpass1(y, dt, fc=3) : + """ + 1st order high pass filter + """ + tau=1/(2*np.pi*fc) + alpha=tau/(tau+dt) + y_filt=np.zeros(y.shape) + y_filt[0]=0 + for i in np.arange(1,len(y)): + y_filt[i]=alpha*y_filt[i-1] + alpha*(y[i]-y[i-1]) + m0=np.mean(y) + m1=np.mean(y_filt) + y_filt+=m0-m1 + return y_filt + + +def applyFilter(x, y,filtDict): + if filtDict['name']=='Moving average': + return moving_average(y, n=np.round(filtDict['param']).astype(int)) + elif filtDict['name']=='Low pass 1st order': + dt = x[1]-x[0] + return lowpass1(y, dt=dt, fc=filtDict['param']) + elif filtDict['name']=='High pass 1st order': + dt = x[1]-x[0] + return highpass1(y, dt=dt, fc=filtDict['param']) + else: + raise NotImplementedError('{}'.format(filtDict)) + +# --------------------------------------------------------------------------------} +# --- +# --------------------------------------------------------------------------------{ +def zero_crossings(y,x=None,direction=None): + """ + Find zero-crossing points in a discrete vector, using linear interpolation. 
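+
+    Example (illustrative sketch, assuming a 1 Hz sine sampled on [0,1]):
+        t = np.linspace(0, 1, 200)
+        xzc, izc, sign = zero_crossings(np.sin(2*np.pi*t), x=t)
+        # xzc is approximately [0.5]: a single down-crossing, sign = [-1]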
+ + direction: 'up' or 'down', to select only up-crossings or down-crossings + + returns: + x values xzc such that y(yzc)==0 + indexes izc, such that the zero is between y[izc] (excluded) and y[izc+1] (included) + + if direction is not provided, also returns: + sign, equal to 1 for up crossing + """ + if x is None: + x=np.arange(len(y)) + + if np.any((x[1:] - x[0:-1]) <= 0.0): + raise Exception('x values need to be in ascending order') + + # Indices before zero-crossing + iBef = np.where(y[1:]*y[0:-1] < 0.0)[0] + + # Find the zero crossing by linear interpolation + xzc = x[iBef] - y[iBef] * (x[iBef+1] - x[iBef]) / (y[iBef+1] - y[iBef]) + + # Selecting points that are exactly 0 and where neighbor change sign + iZero = np.where(y == 0.0)[0] + iZero = iZero[np.where((iZero > 0) & (iZero < x.size-1))] + iZero = iZero[np.where(y[iZero-1]*y[iZero+1] < 0.0)] + + # Concatenate + xzc = np.concatenate((xzc, x[iZero])) + iBef = np.concatenate((iBef, iZero)) + + # Sort + iSort = np.argsort(xzc) + xzc, iBef = xzc[iSort], iBef[iSort] + + # Return up-crossing, down crossing or both + sign = np.sign(y[iBef+1]-y[iBef]) + if direction == 'up': + I= np.where(sign==1)[0] + return xzc[I],iBef[I] + elif direction == 'down': + I= np.where(sign==-1)[0] + return xzc[I],iBef[I] + elif direction is not None: + raise Exception('Direction should be either `up` or `down`') + return xzc, iBef, sign + + +# --------------------------------------------------------------------------------} +# --- Correlation +# --------------------------------------------------------------------------------{ +def correlation(x, nMax=80, dt=1, method='manual'): + """ + Compute auto correlation of a signal + """ + nvec = np.arange(0,nMax) + sigma2 = np.var(x) + R = np.zeros(nMax) + R[0] =1 + for i,nDelay in enumerate(nvec[1:]): + R[i+1] = np.mean( x[0:-nDelay] * x[nDelay:] ) / sigma2 + + tau = nvec*dt + return R, tau + + +def correlated_signal(coeff, n=1000, seed=None): + """ + Create a correlated random signal of length `n` based on the correlation coefficient `coeff` + value[t] = coeff * value[t-1] + (1-coeff) * random + """ + if coeff<0 or coeff>1: + raise Exception('Correlation coefficient should be between 0 and 1') + if seed is not None: + np.random.seed(seed) + + x = np.zeros(n) + rvec = rand(n) + x[0] = rvec[0] + for m in np.arange(1,n): + x[m] = coeff*x[m-1] + (1-coeff)*rvec[m] + x-=np.mean(x) + return x + + +def find_time_offset(t, f, g, outputAll=False): + """ + Find time offset between two signals (may be negative) + + t_offset = find_time_offset(t, f, g) + f(t+t_offset) ~= g(t) + + """ + import scipy + from scipy.signal import correlate + # Remove mean and normalize by std + f = f.copy() + g = g.copy() + f -= f.mean() + g -= g.mean() + f /= f.std() + g /= g.std() + + # Find cross-correlation + xcorr = correlate(f, g) + + # Lags + n = len(f) + dt = t[1]-t[0] + lag = np.arange(1-n, n)*dt + + # Time offset is located at maximum correlation + t_offset = lag[xcorr.argmax()] + + if outputAll: + return t_offset, lag, xcorr + else: + return t_offset + +def sine_approx(t, x, method='least_square'): + """ + Sinusoidal approximation of input signal x + """ + if method=='least_square': + from welib.tools.curve_fitting import fit_sinusoid + y_fit, pfit, fitter = fit_sinusoid(t, x) + omega = fitter.model['coeffs']['omega'] + A = fitter.model['coeffs']['A'] + phi = fitter.model['coeffs']['phi'] + x2 = y_fit + else: + raise NotImplementedError() + + + return x2, omega, A, phi + + +# 
--------------------------------------------------------------------------------} +# --- Convolution +# --------------------------------------------------------------------------------{ +def convolution_integral(time, f, g, method='auto'): + """ + Compute convolution integral: + f * g = \int 0^t f(tau) g(t-tau) dtau = g * f + For now, only works for uniform time vector, an exception is raised otherwise + + method=['auto','direct','fft'], + see scipy.signal.convolve + see scipy.signal.fftconvolve + """ + from scipy.signal import convolve + dt = time[1]-time[0] + if len(np.unique(np.around(np.diff(time)/dt,3)))>1: + raise Exception('Convolution integral implemented for uniform time vector') + + #np.convolve(f.ravel(), g.ravel() )[:len(time)]*dt + return convolve(f.ravel(), g.ravel() )[:len(time)]*dt + + +# --------------------------------------------------------------------------------} +# --- Intervals/peaks +# --------------------------------------------------------------------------------{ +def intervals(b, min_length=1, forgivingJump=True, removeSmallRel=True, removeSmallFact=0.1, mergeCloseRel=False, mergeCloseFact=0.2): + """ + Describe intervals from a boolean vector where intervals are indicated by True + + INPUT: + - b : a logical vector, where 1 means, I'm in an interval. + - min_length: if provided, do not return intervals of length < min_length + - forgivingJump: if true, merge intervals that are separated by a distance < min_length + - removeSmallRel: remove intervals that have a small length compared to the max length of intervals + - removeSmallFact: factor used for removeSmallRel + - mergeCloseRel: merge intervals that are closer than a fraction of the typical distance between intervals + + OUTPUTS: + - IStart : ending indices + - IEnd : ending indices + - Length: interval lenghts (IEnd-IStart+1) + + IStart, IEnd, Lengths = intervals([False, True, True, False, True, True, True, False]) + np.testing.assert_equal(IStart , np.array([1,4])) + np.testing.assert_equal(IEnd , np.array([2,6])) + np.testing.assert_equal(Lengths, np.array([2,3])) + """ + b = np.asarray(b) + total = np.sum(b) + + min_length=max(min_length,1) + if forgivingJump: + min_jump=min_length + else: + min_jump=1 + + if total==0: + IStart = np.array([]) + IEnd = np.array([]) + Lengths= np.array([]) + return IStart, IEnd, Lengths + elif total==1: + i = np.where(b)[0][0] + IStart = np.array([i]) + IEnd = np.array([i]) + Lengths= np.array([1]) + else: + n = len(b) + Idx = np.arange(n)[b] + delta_Idx=np.diff(Idx) + jumps =np.where(delta_Idx>min_jump)[0] + if len(jumps)==0: + IStart = np.array([Idx[0]]) + IEnd = np.array([Idx[-1]]) + else: + istart=Idx[0] + jumps=np.concatenate(([-1],jumps,[len(Idx)-1])) + IStart = Idx[jumps[:-1]+1] # intervals start right after a jump + IEnd = Idx[jumps[1:]] # intervals stops at jump + Lengths = IEnd-IStart+1 + + # Removing intervals smaller than min_length + bKeep = Lengths>=min_length + IStart = IStart[bKeep] + IEnd = IEnd[bKeep] + Lengths = Lengths[bKeep] + # Removing intervals smaller than less than a fraction of the max interval + if removeSmallRel: + bKeep = Lengths>=removeSmallFact*np.max(Lengths) + IStart = IStart[bKeep] + IEnd = IEnd[bKeep] + Lengths = Lengths[bKeep] + + # Distances between intervals + if mergeCloseRel: + if len(IStart)<=2: + pass + else: + D = IStart[1:]-IEnd[0:-1] + #print('D',D,np.max(D),int(np.max(D) * mergeCloseFact)) + min_length = max(int(np.max(D) * mergeCloseFact), min_length) + if min_length<=1: + pass + else: + #print('Readjusting min_length to {} 
to accomodate for max interval spacing of {:.0f}'.format(min_length, np.mean(D))) + return intervals(b, min_length=min_length, forgivingJump=True, removeSmallRel=removeSmallRel, removeSmallFact=removeSmallFact, mergeCloseRel=False) + return IStart, IEnd, Lengths + +def peaks(x, threshold=0.3, threshold_abs=True, method='intervals', min_length=3, + mergeCloseRel=True, returnIntervals=False): + """ + Find peaks in a signal, above a given threshold + INPUTS: + - x : 1d-array, signal + - threshold : scalar, absolute or relative threshold beyond which peaks are looked for + relative threshold are proportion of the max-min of the signal (between 0-1) + - threshold_abs : boolean, specify whether the threshold is absolute or relative + - method : string, selects which method is used to find the peaks, between: + - 'interval' : one peak per interval above the threshold + - 'derivative': uses derivative to find maxima, may return more than one per interval + - min_length: + - if 'interval' method is used: minimum interval + - if 'derivative' method is used: minimum distance between two peaks + + OPTIONS for interval method: + - mergeCloseRel: logical, if True, attempts to merge intervals that are close to each other compare to the typical interval spacing + set to False if all peaks are wanted + - returnIntervals: logical, if true, return intervals used for interval method + OUTPUTS: + - I : index of the peaks + -[IStart, IEnd] if return intervals is true, see function `intervals` + + + see also: + scipy.signal.find_peaks + + """ + if not threshold_abs: + threshold = threshold * (np.max(y) - np.min(y)) + np.min(y) + + if method =='intervals': + IStart, IEnd, Lengths = intervals(x>threshold, min_length=min_length, mergeCloseRel=mergeCloseRel) + I = np.array([iS if L==1 else np.argmax(x[iS:iE+1])+iS for iS,iE,L in zip(IStart,IEnd,Lengths)]) + if returnIntervals: + return I, IStart, IEnd + else: + return I + + elif method =='derivative': + I = indexes(x, thres=threshold, thres_abs=True, min_dist=min_length) + return I + else: + raise NotImplementedError('Method {}'.format(method)) + + + +# --------------------------------------------------------------------------------} +# --- Simple signals +# --------------------------------------------------------------------------------{ +def impulse(time, t0=0, A=1, epsilon=None, **kwargs): + """ + returns a dirac function: + A/dt if t==t0 + 0 otherwise + + Since the impulse response is poorly defined in discrete space, it's recommended + to use a smooth_delta. See the welib.tools.functions.delta + """ + from .functions import delta + t=np.asarray(time)-t0 + y= delta(t, epsilon=epsilon, **kwargs)*A + return y + +def step(time, t0=0, valueAtStep=0, A=1): + """ + returns a step function: + 0 if tt0 + valueAtStep if t==t0 + + NOTE: see also welib.tools.functions.Pi + """ + return np.heaviside(time-t0, valueAtStep)*A + +def ramp(time, t0=0, valueAtStep=0, A=1): + """ + returns a ramp function: + 0 if t=t0 + + NOTE: see also welib.tools.functions.Pi + """ + t=np.asarray(time)-t0 + y=np.zeros(t.shape) + y[t>=0]=A*t[t>=0] + return y + + +def hat(time, T=1, t0=0, A=1, method='abs'): + """ + returns a hat function: + A*hat if |t-t0|tStart].copy() - - dfPsi= bin_DF(df, psiBin, colPsi) - if np.any(dfPsi['Counts']<1): - print('[WARN] some bins have no data! 
Increase the bin size.') - - return dfPsi - - - - +""" +Set of tools for statistics + - measures (R^2, RMSE) + - pdf distributions + - Binning + +""" +import numpy as np +import pandas as pd + +# --------------------------------------------------------------------------------} +# --- Stats measures +# --------------------------------------------------------------------------------{ +def rsquare(y,f, c = True): + """ Compute coefficient of determination of data fit model and RMSE + [r2 rmse] = rsquare(y,f) + [r2 rmse] = rsquare(y,f,c) + RSQUARE computes the coefficient of determination (R-square) value from + actual data Y and model data F. The code uses a general version of + R-square, based on comparing the variability of the estimation errors + with the variability of the original values. RSQUARE also outputs the + root mean squared error (RMSE) for the user's convenience. + Note: RSQUARE ignores comparisons involving NaN values. + INPUTS + Y : Actual data + F : Model fit + + # OPTION + C : Constant term in model + R-square may be a questionable measure of fit when no + constant term is included in the model. + [DEFAULT] TRUE : Use traditional R-square computation + FALSE : Uses alternate R-square computation for model + without constant term [R2 = 1 - NORM(Y-F)/NORM(Y)] + # OUTPUT + R2 : Coefficient of determination + RMSE : Root mean squared error """ + # Compare inputs + if not np.all(y.shape == f.shape) : + raise Exception('Y and F must be the same size') + # Check for NaN + tmp = np.logical_not(np.logical_or(np.isnan(y),np.isnan(f))) + y = y[tmp] + f = f[tmp] + if c: + r2 = max(0,1-np.sum((y-f)**2)/np.sum((y-np.mean(y))** 2)) + else: + r2 = 1 - np.sum((y - f) ** 2) / np.sum((y) ** 2) + if r2 < 0: + import warnings + warnings.warn('Consider adding a constant term to your model') + r2 = 0 + rmse = np.sqrt(np.mean((y - f) ** 2)) + return r2,rmse + +def mean_rel_err(t1, y1, t2, y2, method='mean'): + """ + Methods: + 'mean' : 100 * |y1-y2|/mean(y1) + 'meanabs': 100 * |y1-y2|/mean(|y1|) + 'minmax': y1 and y2 scaled between 0.001 and 1 + |y1s-y2s|/|y1| + """ + if len(y1)!=len(y2): + y2=np.interp(t1,t2,y2) + # Method 1 relative to mean + if method=='mean': + ref_val = np.mean(y1) + meanrelerr = np.mean(np.abs(y1-y2)/ref_val)*100 + elif method=='meanabs': + ref_val = np.mean(np.abs(y1)) + meanrelerr = np.mean(np.abs(y1-y2)/ref_val)*100 + elif method=='minmax': + # Method 2 scaling signals + Min=min(np.min(y1), np.min(y2)) + Max=max(np.max(y1), np.max(y2)) + y1=(y1-Min)/(Max-Min)+0.001 + y2=(y2-Min)/(Max-Min)+0.001 + meanrelerr = np.mean(np.abs(y1-y2)/np.abs(y1))*100 + #print('Mean rel error {:7.2f} %'.format( meanrelerr)) + return meanrelerr + + +# --------------------------------------------------------------------------------} +# --- PDF +# --------------------------------------------------------------------------------{ +def pdf_histogram(y,nBins=50, norm=True, count=False): + yh, xh = np.histogram(y[~np.isnan(y)], bins=nBins) + dx = xh[1] - xh[0] + xh = xh[:-1] + dx/2 + if count: + yh = yh / (len(n)*dx) # TODO DEBUG /VERIFY THIS + else: + yh = yh / (nBins*dx) + if norm: + yh=yh/np.trapz(yh,xh) + return xh,yh + +def pdf_gaussian_kde(data, bw='scott', nOut=100, cut=3, clip=(-np.inf,np.inf)): + """ + Returns a smooth probability density function (univariate kernel density estimate - kde) + Inspired from `_univariate_kdeplot` from `seaborn.distributions` + + INPUTS: + bw: float defining bandwidth or method (string) to find it (more or less sigma) + cut: number of bandwidth kept for x axis 
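# --- Editor's sketch (illustrative only, not part of the patch): typical use of rsquare()
# --- and mean_rel_err() defined above. The module path pydatview.tools.stats is assumed.
import numpy as np
from pydatview.tools.stats import rsquare, mean_rel_err
np.random.seed(0)
x = np.linspace(0, 1, 50)
y = 2*x + 1 + 0.05*np.random.normal(size=x.shape)   # "measurements"
f = 2*x + 1                                         # model prediction
r2, rmse = rsquare(y, f)          # r2 close to 1, rmse close to 0.05
err = mean_rel_err(x, y, x, f)    # mean relative error, in percent
print(r2, rmse, err)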
(e.g. 3 sigmas) + clip: (xmin, xmax) values + OUTPUTS: + x, y: where y(x) = pdf(data) + """ + from scipy import stats + from six import string_types + + data = np.asarray(data) + data = data[~np.isnan(data)] + # Gaussian kde + kde = stats.gaussian_kde(data, bw_method = bw) + # Finding a relevant support (i.e. x values) + if isinstance(bw, string_types): + bw_ = "scotts" if bw == "scott" else bw + bw = getattr(kde, "%s_factor" % bw_)() * np.std(data) + x_min = max(data.min() - bw * cut, clip[0]) + x_max = min(data.max() + bw * cut, clip[1]) + x = np.linspace(x_min, x_max, nOut) + # Computing kde on support + y = kde(x) + return x, y + + +def pdf_sklearn(y): + #from sklearn.neighbors import KernelDensity + #kde = KernelDensity(kernel='gaussian', bandwidth=0.75).fit(y) #you can supply a bandwidth + #x=np.linspace(0,5,100)[:, np.newaxis] + #log_density_values=kde.score_samples(x) + #density=np.exp(log_density) + pass + +def pdf_sns(y,nBins=50): + import seaborn.apionly as sns + hh=sns.distplot(y,hist=True,norm_hist=False).get_lines()[0].get_data() + xh=hh[0] + yh=hh[1] + return xh,yh + + + +# --------------------------------------------------------------------------------} +# --- Binning +# --------------------------------------------------------------------------------{ +def bin_DF(df, xbins, colBin, stats='mean'): + """ + Perform bin averaging of a dataframe + INPUTS: + - df : pandas dataframe + - xBins: end points delimiting the bins, array of ascending x values) + - colBin: column name (string) of the dataframe, used for binning + OUTPUTS: + binned dataframe, with additional columns 'Counts' for the number + + """ + if colBin not in df.columns.values: + raise Exception('The column `{}` does not appear to be in the dataframe'.format(colBin)) + xmid = (xbins[:-1]+xbins[1:])/2 + df['Bin'] = pd.cut(df[colBin], bins=xbins, labels=xmid ) # Adding a column that has bin attribute + if stats=='mean': + df2 = df.groupby('Bin').mean() # Average by bin + elif stats=='std': + df2 = df.groupby('Bin').std() # std by bin + # also counting + df['Counts'] = 1 + dfCount=df[['Counts','Bin']].groupby('Bin').sum() + df2['Counts'] = dfCount['Counts'] + # Just in case some bins are missing (will be nan) + df2 = df2.reindex(xmid) + return df2 + +def azimuthal_average_DF(df, psiBin=np.arange(0,360+1,10), colPsi='Azimuth_[deg]', tStart=None, colTime='Time_[s]'): + """ + Average a dataframe based on azimuthal value + Returns a dataframe with same amount of columns as input, and azimuthal values as index + """ + if tStart is not None: + if colTime not in df.columns.values: + raise Exception('The column `{}` does not appear to be in the dataframe'.format(colTime)) + df=df[ df[colTime]>tStart].copy() + + dfPsi= bin_DF(df, psiBin, colPsi, stats='mean') + if np.any(dfPsi['Counts']<1): + print('[WARN] some bins have no data! Increase the bin size.') + + return dfPsi + + +def azimuthal_std_DF(df, psiBin=np.arange(0,360+1,10), colPsi='Azimuth_[deg]', tStart=None, colTime='Time_[s]'): + """ + Average a dataframe based on azimuthal value + Returns a dataframe with same amount of columns as input, and azimuthal values as index + """ + if tStart is not None: + if colTime not in df.columns.values: + raise Exception('The column `{}` does not appear to be in the dataframe'.format(colTime)) + df=df[ df[colTime]>tStart].copy() + + dfPsi= bin_DF(df, psiBin, colPsi, stats='std') + if np.any(dfPsi['Counts']<1): + print('[WARN] some bins have no data! 
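# --- Editor's sketch (illustrative only, not part of the patch): azimuthal binning with
# --- azimuthal_average_DF() defined above. The module path pydatview.tools.stats is
# --- assumed and the column/signal names are hypothetical (they follow the defaults).
import numpy as np
import pandas as pd
from pydatview.tools.stats import azimuthal_average_DF
time = np.linspace(0, 30, 1000)                     # 10 rotations at a 3 s period
psi  = np.mod(360*time/3.0, 360)
df   = pd.DataFrame({'Time_[s]': time,
                     'Azimuth_[deg]': psi,
                     'Load': np.cos(np.radians(psi))})
dfPsi = azimuthal_average_DF(df, psiBin=np.arange(0, 361, 10), colPsi='Azimuth_[deg]')
print(dfPsi[['Load', 'Counts']])                    # roughly 28 samples per 10 deg bin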
Increase the bin size.') + + return dfPsi + + + + diff --git a/weio b/weio index 1a04e7a..bdd10b4 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit 1a04e7aaa825669bb020bf63b8e805991682c4be +Subproject commit bdd10b447bf484c9ebf6166dbaaf1741b4244f74 From 20026e2cbc75becc2f26d284775e614811420fc6 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Fri, 28 Jan 2022 23:55:09 -0700 Subject: [PATCH 17/36] Going through with plotdata even if all values are nan --- pydatview/plotdata.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/pydatview/plotdata.py b/pydatview/plotdata.py index a97417c..09073c9 100644 --- a/pydatview/plotdata.py +++ b/pydatview/plotdata.py @@ -301,7 +301,10 @@ def _xAtYMinCalc(PD, yMin): elif PD.xIsDate: return PD.x[0],'{}'.format(PD.x[0]) else: - v = PD.x[np.where(PD.y == yMin)[0][0]] + try: + v = PD.x[np.where(PD.y == yMin)[0][0]] # Might fail if all nan + except: + v = PD.x[0] s=pretty_num(v) return (v,s) @@ -311,7 +314,10 @@ def _xAtYMaxCalc(PD, yMax): elif PD.xIsDate: return PD.x[-1],'{}'.format(PD.x[-1]) else: - v = PD.x[np.where(PD.y == yMax)[0][0]] + try: + v = PD.x[np.where(PD.y == yMax)[0][0]] # Might fail if all nan + except: + v = PD.x[0] s=pretty_num(v) return (v,s) From 690be9172ecbde939b914ab366b7f3525de5a4c1 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Tue, 1 Feb 2022 20:10:08 -0700 Subject: [PATCH 18/36] Update of weio (turbsim, hawcstab2, hawc2 files) --- weio | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/weio b/weio index bdd10b4..4dc177c 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit bdd10b447bf484c9ebf6166dbaaf1741b4244f74 +Subproject commit 4dc177c24eda16e50df831dd8f5cd0dcfddfe274 From 3cb3df5d9825b5744cb3932ba4f51d940228f7d7 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Tue, 1 Feb 2022 23:01:21 -0700 Subject: [PATCH 19/36] Remember fileformat when reloading (Close #103) and better handling of formulae on reload --- pydatview/Tables.py | 120 +++- pydatview/common.py | 2 +- pydatview/main.py | 1451 +++++++++++++++++++++--------------------- tests/test_Tables.py | 116 ++-- 4 files changed, 890 insertions(+), 799 deletions(-) diff --git a/pydatview/Tables.py b/pydatview/Tables.py index 0c8732c..16900ac 100644 --- a/pydatview/Tables.py +++ b/pydatview/Tables.py @@ -28,13 +28,25 @@ def __init__(self,tabs=[]): self._tabs=tabs self.Naming='Ellude' + # --- behaves like a list... 
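# --- Editor's sketch (illustrative only, not part of the patch): with the __iter__/__next__
# --- methods below, a TableList can be iterated directly, and load_tables_from_files now
# --- takes a list of fileformats (None entries trigger auto-detection). File names below
# --- are hypothetical.
from pydatview.Tables import TableList
tabList = TableList()
warnings = tabList.load_tables_from_files(filenames=['sim1.outb', 'sim2.csv'],
                                          fileformats=None)   # or one format per file
for tab in tabList:                     # iteration enabled by __iter__/__next__ below
    print(tab.name, tab.nCols, tab.nRows)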
+ def __iter__(self): + self.__n = 0 + return self + + def __next__(self): + if self.__n < len(self._tabs): + self.__n += 1 + return self._tabs[self.__n-1] + else: + raise StopIteration + def append(self,t): if isinstance(t,list): self._tabs += t else: self._tabs += [t] - + # --- Main high level methods def from_dataframes(self, dataframes=[], names=[], bAdd=False): if not bAdd: self.clean() # TODO figure it out @@ -43,27 +55,33 @@ def from_dataframes(self, dataframes=[], names=[], bAdd=False): if df is not None: self.append(Table(data=df, name=name)) - def load_tables_from_files(self, filenames=[], fileformat=None, bAdd=False): - """ load multiple files, only trigger the plot at the end """ + def load_tables_from_files(self, filenames=[], fileformats=None, bAdd=False): + """ load multiple files into table list""" if not bAdd: self.clean() # TODO figure it out + + if fileformats is None: + fileformats=[None]*len(filenames) + assert type(fileformats) ==list, 'fileformats must be a list' + + # Loop through files, appending tables within files warnList=[] - for f in filenames: + for f,ff in zip(filenames, fileformats): if f in self.unique_filenames: warnList.append('Warn: Cannot add a file already opened ' + f) elif len(f)==0: pass # warn+= 'Warn: an empty filename was skipped' +'\n' else: - tabs, warnloc = self._load_file_tabs(f,fileformat=fileformat) + tabs, warnloc = self._load_file_tabs(f,fileformat=ff) if len(warnloc)>0: warnList.append(warnloc) self.append(tabs) return warnList - def _load_file_tabs(self,filename,fileformat=None): - """ load a single file, adds table, and potentially trigger plotting """ + def _load_file_tabs(self, filename, fileformat=None): + """ load a single file, adds table """ # Returning a list of tables tabs=[] warn='' @@ -71,7 +89,14 @@ def _load_file_tabs(self,filename,fileformat=None): warn = 'Error: File not found: `'+filename+'`\n' return tabs, warn try: - F = weio.read(filename,fileformat = fileformat) + #F = weio.read(filename, fileformat = fileformat) + # --- Expanded version of weio.read + F = None + if fileformat is None: + fileformat, F = weio.detectFormat(filename) + # Reading the file with the appropriate class if necessary + if not isinstance(F, fileformat.constructor): + F=fileformat.constructor(filename=filename) dfs = F.toDataFrame() except weio.FileNotFoundError as e: warn = 'Error: A file was not found!\n\n While opening:\n\n {}\n\n the following file was not found:\n\n {}\n'.format(filename, e.filename) @@ -101,11 +126,11 @@ def _load_file_tabs(self,filename,fileformat=None): pass elif not isinstance(dfs,dict): if len(dfs)>0: - tabs=[Table(data=dfs, filename=filename, fileformat=F.formatName())] + tabs=[Table(data=dfs, filename=filename, fileformat=fileformat)] else: for k in list(dfs.keys()): if len(dfs[k])>0: - tabs.append(Table(data=dfs[k], name=str(k), filename=filename, fileformat=F.formatName())) + tabs.append(Table(data=dfs[k], name=str(k), filename=filename, fileformat=fileformat)) if len(tabs)<=0: warn='Warn: No dataframe found in file: '+filename+'\n' return tabs, warn @@ -166,6 +191,7 @@ def getDisplayTabNames(self): else: raise Exception('Table naming unknown: {}'.format(self.Naming)) + # --- Properties @property def tabNames(self): return [t.name for t in self._tabs] @@ -174,10 +200,25 @@ def tabNames(self): def filenames(self): return [t.filename for t in self._tabs] + @property + def fileformats(self): + return [t.fileformat for t in self._tabs] + @property def unique_filenames(self): return list(set([t.filename for t in 
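# --- Editor's sketch (illustrative only, not part of the patch): the expanded weio.read
# --- logic used in _load_file_tabs above, kept here in standalone form. 'data.csv' is a
# --- hypothetical file.
import weio
filename = 'data.csv'
fileformat, F = weio.detectFormat(filename)       # FileFormat object, possibly the reader too
if not isinstance(F, fileformat.constructor):     # re-read with the dedicated class if needed
    F = fileformat.constructor(filename=filename)
dfs = F.toDataFrame()
print(fileformat.name)                            # format name, now stored on each Table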
self._tabs])) + @property + def filenames_and_formats(self): + """ return unique list of filenames with associated fileformats """ + filenames = [] + fileformats = [] + for t in self._tabs: + if t.filename not in filenames: + filenames.append(t.filename) + fileformats.append(t.fileformat) + return filenames, fileformats + def clean(self): del self._tabs self._tabs=[] @@ -218,6 +259,7 @@ def applyCommonMaskString(self,maskString,bAdd=True): return dfs_new, names_new, errors + # --- Resampling and other actions def applyResampling(self,iCol,sampDict,bAdd=True): dfs_new = [] names_new = [] @@ -260,21 +302,40 @@ def get(self,i): # --------------------------------------------------------------------------------} # --- Table # --------------------------------------------------------------------------------{ -# TODO sort out the naming # -# Main naming concepts: -# name : -# active_name : -# raw_name : -# filename : class Table(object): - def __init__(self,data=None,name='',filename='',columns=[],fileformat=''): + """ + Main attributes: + - data + - columns + - name + - raw_name + - active_name + - filename + - fileformat + - fileformat_name + - nCols x nRows + - mask + - maskString + - formulas + """ + # TODO sort out the naming + # Main naming concepts: + # name : + # active_name : + # raw_name : + # filename : + def __init__(self,data=None,name='',filename='',columns=[], fileformat=None): # Default init self.maskString='' self.mask=None - self.filename = filename - self.fileformat = fileformat + self.filename = filename + self.fileformat = fileformat + if fileformat is not None: + self.fileformat_name = fileformat.name + else: + self.fileformat_name = '' self.formulas = [] if not isinstance(data,pd.DataFrame): @@ -320,7 +381,15 @@ def setupName(self,name=''): def __repr__(self): - return 'Tab {} ({}x{}) (raw: {}, active: {}, file: {})'.format(self.name,self.nCols,self.nRows,self.raw_name, self.active_name,self.filename) + s='Table object:\n' + s+=' - name: {}\n'.format(self.name) + s+=' - raw_name : {}\n'.format(self.raw_name) + s+=' - active_name: {}\n'.format(self.raw_name) + s+=' - filename : {}\n'.format(self.filename) + s+=' - fileformat : {}\n'.format(self.fileformat) + s+=' - fileformat_name : {}\n'.format(self.fileformat_name) + s+=' - nCols x nRows: {}x{}\n'.format(self.nCols, self.nRows) + return s def columnsFromDF(self,df): return [s.replace('_',' ') for s in df.columns.values.astype(str)] @@ -330,12 +399,6 @@ def clearMask(self): self.maskString='' self.mask=None - def addLabelToName(self,label): - print('raw_name',self.raw_name) - raw_name=self.raw_name - sp=raw_name.split('|') - print(sp) - def applyMaskString(self,maskString,bAdd=True): df = self.data Index = np.array(range(df.shape[0])) @@ -425,7 +488,7 @@ def convertTimeColumns(self): if len(self.data)>0: for i,c in enumerate(self.data.columns.values): y = self.data.iloc[:,i] - if y.dtype == np.object: + if y.dtype == object: if isinstance(y.values[0], str): # tring to convert to date try: @@ -480,6 +543,8 @@ def rename(self,new_name): def addColumn(self,sNewName,NewCol,i=-1,sFormula=''): if i<0: i=self.data.shape[1] + elif i>self.data.shape[1]+1: + i=self.data.shape[1] self.data.insert(int(i),sNewName,NewCol) self.columns=self.columnsFromDF(self.data) for f in self.formulas: @@ -540,7 +605,6 @@ def evalFormula(self,sFormula): c_no_unit = no_unit(c).strip() c_in_df = df.columns[i] sFormula=sFormula.replace('{'+c_no_unit+'}','df[\''+c_in_df+'\']') - #print(sFormula) try: NewCol=eval(sFormula) return NewCol diff --git 
a/pydatview/common.py b/pydatview/common.py index a455e46..acda0c1 100644 --- a/pydatview/common.py +++ b/pydatview/common.py @@ -448,7 +448,7 @@ def Error(parent, message, caption = 'Error!'): # --------------------------------------------------------------------------------{ def isString(x): - b = x.dtype == np.object and isinstance(x.values[0], str) + b = x.dtype == object and isinstance(x.values[0], str) return b def isDate(x): diff --git a/pydatview/main.py b/pydatview/main.py index 6e55868..34e8240 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -1,724 +1,727 @@ -from __future__ import division, unicode_literals, print_function, absolute_import -from builtins import map, range, chr, str -from io import open -from future import standard_library -standard_library.install_aliases() - -import numpy as np -import os.path -try: - import pandas as pd -except: - print('') - print('') - print('Error: problem loading pandas package:') - print(' - Check if this package is installed ( e.g. type: `pip install pandas`)') - print(' - If you are using anaconda, try `conda update python.app`') - print(' - If none of the above work, contact the developer.') - print('') - print('') - sys.exit(-1) - #raise - -import sys -import traceback -import gc - -# GUI -import wx -from .GUIPlotPanel import PlotPanel -from .GUISelectionPanel import SelectionPanel,SEL_MODES,SEL_MODES_ID -from .GUISelectionPanel import ColumnPopup,TablePopup -from .GUIInfoPanel import InfoPanel -from .GUIToolBox import GetKeyString, TBAddTool -from .Tables import TableList, Table -# Helper -from .common import * -from .GUICommon import * - - - -# --------------------------------------------------------------------------------} -# --- GLOBAL -# --------------------------------------------------------------------------------{ -PROG_NAME='pyDatView' -PROG_VERSION='v0.2-local' -try: - import weio # File Formats and File Readers - FILE_FORMATS= weio.fileFormats() -except: - print('') - print('Error: the python package `weio` was not imported successfully.\n') - print('Most likely the submodule `weio` was not cloned with `pyDatView`') - print('Type the following command to retrieve it:\n') - print(' git submodule update --init --recursive\n') - print('Alternatively re-clone this repository into a separate folder:\n') - print(' git clone --recurse-submodules https://github.com/ebranlard/pyDatView\n') - sys.exit(-1) -FILE_FORMATS_EXTENSIONS = [['.*']]+[f.extensions for f in FILE_FORMATS] -FILE_FORMATS_NAMES = ['auto (any supported file)'] + [f.name for f in FILE_FORMATS] -FILE_FORMATS_NAMEXT =['{} ({})'.format(n,','.join(e)) for n,e in zip(FILE_FORMATS_NAMES,FILE_FORMATS_EXTENSIONS)] - -SIDE_COL = [160,160,300,420,530] -SIDE_COL_LARGE = [200,200,360,480,600] -BOT_PANL =85 - -#matplotlib.rcParams['text.usetex'] = False -# matplotlib.rcParams['font.sans-serif'] = 'DejaVu Sans' -#matplotlib.rcParams['font.family'] = 'Arial' -#matplotlib.rcParams['font.sans-serif'] = 'Arial' -# matplotlib.rcParams['font.family'] = 'sans-serif' - - - - - -# --------------------------------------------------------------------------------} -# --- Drag and drop -# --------------------------------------------------------------------------------{ -# Implement File Drop Target class -class FileDropTarget(wx.FileDropTarget): - def __init__(self, parent): - wx.FileDropTarget.__init__(self) - self.parent = parent - def OnDropFiles(self, x, y, filenames): - filenames = [f for f in filenames if not os.path.isdir(f)] - filenames.sort() - if len(filenames)>0: - # If 
Ctrl is pressed we add - bAdd= wx.GetKeyState(wx.WXK_CONTROL); - iFormat=self.parent.comboFormats.GetSelection() - if iFormat==0: # auto-format - Format = None - else: - Format = FILE_FORMATS[iFormat-1] - self.parent.load_files(filenames,fileformat=Format,bAdd=bAdd) - return True - - - - -# --------------------------------------------------------------------------------} -# --- Main Frame -# --------------------------------------------------------------------------------{ -class MainFrame(wx.Frame): - def __init__(self, filename=None): - # Parent constructor - wx.Frame.__init__(self, None, -1, PROG_NAME+' '+PROG_VERSION) - # Data - self.tabList=TableList() - self.restore_formulas = [] - - # Hooking exceptions to display them to the user - sys.excepthook = MyExceptionHook - # --- GUI - #font = self.GetFont() - #print(font.GetFamily(),font.GetStyle(),font.GetPointSize()) - #font.SetFamily(wx.FONTFAMILY_DEFAULT) - #font.SetFamily(wx.FONTFAMILY_MODERN) - #font.SetFamily(wx.FONTFAMILY_SWISS) - #font.SetPointSize(8) - #print(font.GetFamily(),font.GetStyle(),font.GetPointSize()) - #self.SetFont(font) - # --- Menu - menuBar = wx.MenuBar() - - fileMenu = wx.Menu() - loadMenuItem = fileMenu.Append(wx.ID_NEW,"Open file" ,"Open file" ) - exptMenuItem = fileMenu.Append(-1 ,"Export table" ,"Export table" ) - saveMenuItem = fileMenu.Append(wx.ID_SAVE,"Save figure" ,"Save figure" ) - exitMenuItem = fileMenu.Append(wx.ID_EXIT, 'Quit', 'Quit application') - menuBar.Append(fileMenu, "&File") - self.Bind(wx.EVT_MENU,self.onExit ,exitMenuItem) - self.Bind(wx.EVT_MENU,self.onLoad ,loadMenuItem) - self.Bind(wx.EVT_MENU,self.onExport,exptMenuItem) - self.Bind(wx.EVT_MENU,self.onSave ,saveMenuItem) - - dataMenu = wx.Menu() - menuBar.Append(dataMenu, "&Data") - self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e, 'Mask') , dataMenu.Append(wx.ID_ANY, 'Mask')) - self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'Outlier'), dataMenu.Append(wx.ID_ANY, 'Outliers removal')) - self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'Filter') , dataMenu.Append(wx.ID_ANY, 'Filter')) - self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'Resample') , dataMenu.Append(wx.ID_ANY, 'Resample')) - self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'FASTRadialAverage'), dataMenu.Append(wx.ID_ANY, 'FAST - Radial average')) - - toolMenu = wx.Menu() - menuBar.Append(toolMenu, "&Tools") - self.Bind(wx.EVT_MENU,lambda e: self.onShowTool(e, 'CurveFitting'), toolMenu.Append(wx.ID_ANY, 'Curve fitting')) - self.Bind(wx.EVT_MENU,lambda e: self.onShowTool(e, 'LogDec') , toolMenu.Append(wx.ID_ANY, 'Damping from decay')) - - helpMenu = wx.Menu() - aboutMenuItem = helpMenu.Append(wx.NewId(), 'About', 'About') - menuBar.Append(helpMenu, "&Help") - self.SetMenuBar(menuBar) - self.Bind(wx.EVT_MENU,self.onAbout,aboutMenuItem) - - # --- ToolBar - tb = self.CreateToolBar(wx.TB_HORIZONTAL|wx.TB_TEXT|wx.TB_HORZ_LAYOUT) - self.toolBar = tb - self.comboFormats = wx.ComboBox(tb, choices = FILE_FORMATS_NAMEXT, style=wx.CB_READONLY) - self.comboFormats.SetSelection(0) - self.comboMode = wx.ComboBox(tb, choices = SEL_MODES, style=wx.CB_READONLY) - self.comboMode.SetSelection(0) - self.Bind(wx.EVT_COMBOBOX, self.onModeChange, self.comboMode ) - tb.AddSeparator() - tb.AddControl( wx.StaticText(tb, -1, 'Mode: ' ) ) - tb.AddControl( self.comboMode ) - tb.AddStretchableSpace() - tb.AddControl( wx.StaticText(tb, -1, 'Format: ' ) ) - tb.AddControl(self.comboFormats ) - tb.AddSeparator() - #bmp = wx.Bitmap('help.png') #wx.Bitmap("NEW.BMP", wx.BITMAP_TYPE_BMP) - 
TBAddTool(tb,"Open" ,wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN),self.onLoad) - TBAddTool(tb,"Reload",wx.ArtProvider.GetBitmap(wx.ART_REDO),self.onReload) - try: - TBAddTool(tb,"Add" ,wx.ArtProvider.GetBitmap(wx.ART_PLUS),self.onAdd) - except: - TBAddTool(tb,"Add" ,wx.ArtProvider.GetBitmap(wx.FILE_OPEN),self.onAdd) - #self.AddTBBitmapTool(tb,"Debug" ,wx.ArtProvider.GetBitmap(wx.ART_ERROR),self.onDEBUG) - tb.AddStretchableSpace() - tb.Realize() - - # --- Status bar - self.statusbar=self.CreateStatusBar(3, style=0) - self.statusbar.SetStatusWidths([200, -1, 70]) - - # --- Main Panel and Notebook - self.MainPanel = wx.Panel(self) - #self.MainPanel = wx.Panel(self, style=wx.RAISED_BORDER) - #self.MainPanel.SetBackgroundColour((200,0,0)) - - #self.nb = wx.Notebook(self.MainPanel) - #self.nb.Bind(wx.EVT_NOTEBOOK_PAGE_CHANGED, self.on_tab_change) - - - sizer = wx.BoxSizer() - #sizer.Add(self.nb, 1, flag=wx.EXPAND) - self.MainPanel.SetSizer(sizer) - - # --- Drag and drop - dd = FileDropTarget(self) - self.SetDropTarget(dd) - - # --- Main Frame (self) - self.FrameSizer = wx.BoxSizer(wx.VERTICAL) - slSep = wx.StaticLine(self, -1, size=wx.Size(-1,1), style=wx.LI_HORIZONTAL) - self.FrameSizer.Add(slSep ,0, flag=wx.EXPAND|wx.BOTTOM,border=0) - self.FrameSizer.Add(self.MainPanel,1, flag=wx.EXPAND,border=0) - self.SetSizer(self.FrameSizer) - - self.SetSize((900, 700)) - self.Center() - self.Show() - self.Bind(wx.EVT_SIZE, self.OnResizeWindow) - - # Shortcuts - idFilter=wx.NewId() - self.Bind(wx.EVT_MENU, self.onFilter, id=idFilter) - - accel_tbl = wx.AcceleratorTable( - [(wx.ACCEL_CTRL, ord('F'), idFilter )] - ) - self.SetAcceleratorTable(accel_tbl) - - - def onFilter(self,event): - if hasattr(self,'selPanel'): - self.selPanel.colPanel1.tFilter.SetFocus() - event.Skip() - - - - def clean_memory(self,bReload=False): - #print('Clean memory') - # force Memory cleanup - self.tabList.clean() - if not bReload: - if hasattr(self,'selPanel'): - self.selPanel.clean_memory() - if hasattr(self,'infoPanel'): - self.infoPanel.clean() - if hasattr(self,'plotPanel'): - self.plotPanel.cleanPlot() - gc.collect() - - def load_files(self, filenames=[], fileformat=None, bReload=False, bAdd=False): - """ load multiple files, only trigger the plot at the end """ - if bReload: - if hasattr(self,'selPanel'): - self.selPanel.saveSelection() # TODO move to tables - - if not bAdd: - self.clean_memory(bReload=bReload) - - base_filenames = [os.path.basename(f) for f in filenames] - filenames = [f for __, f in sorted(zip(base_filenames, filenames))] - # Load the tables - warnList = self.tabList.load_tables_from_files(filenames=filenames, fileformat=fileformat, bAdd=bAdd) - if bReload: - # Restore formulas that were previously added - _ITab, _STab = self.selPanel.getAllTables() - ITab = [iTab for __, iTab in sorted(zip(_STab, _ITab))] - if len(ITab) != len(self.restore_formulas): - raise ValueError('Invalid length of tabs and formulas!') - for iTab, f_list in zip(ITab, self.restore_formulas): - for f in f_list: - self.tabList.get(iTab).addColumnByFormula(f['name'], f['formula']) - self.restore_formulas = [] - for warn in warnList: - Warn(self,warn) - if self.tabList.len()>0: - self.load_tabs_into_GUI(bReload=bReload, bAdd=bAdd, bPlot=True) - - def load_df(self, df, name=None, bAdd=False, bPlot=True): - if bAdd: - self.tabList.append(Table(data=df, name=name)) - else: - self.tabList = TableList( [Table(data=df, name=name)] ) - self.load_tabs_into_GUI(bAdd=bAdd, bPlot=bPlot) - if hasattr(self,'selPanel'): - 
self.selPanel.updateLayout(SEL_MODES_ID[self.comboMode.GetSelection()]) - - def load_dfs(self, dfs, names, bAdd=False): - self.tabList.from_dataframes(dataframes=dfs, names=names, bAdd=bAdd) - self.load_tabs_into_GUI(bAdd=bAdd, bPlot=True) - if hasattr(self,'selPanel'): - self.selPanel.updateLayout(SEL_MODES_ID[self.comboMode.GetSelection()]) - - def load_tabs_into_GUI(self, bReload=False, bAdd=False, bPlot=True): - if bAdd: - if not hasattr(self,'selPanel'): - bAdd=False - - if (not bReload) and (not bAdd): - self.cleanGUI() - self.Freeze() - # Setting status bar - self.setStatusBar() - - if bReload or bAdd: - self.selPanel.update_tabs(self.tabList) - else: - mode = SEL_MODES_ID[self.comboMode.GetSelection()] - #self.vSplitter = wx.SplitterWindow(self.nb) - self.vSplitter = wx.SplitterWindow(self.MainPanel) - self.selPanel = SelectionPanel(self.vSplitter, self.tabList, mode=mode, mainframe=self) - self.tSplitter = wx.SplitterWindow(self.vSplitter) - #self.tSplitter.SetMinimumPaneSize(20) - self.infoPanel = InfoPanel(self.tSplitter) - self.plotPanel = PlotPanel(self.tSplitter, self.selPanel, self.infoPanel, self) - self.tSplitter.SetSashGravity(0.9) - self.tSplitter.SplitHorizontally(self.plotPanel, self.infoPanel) - self.tSplitter.SetMinimumPaneSize(BOT_PANL) - self.tSplitter.SetSashGravity(1) - self.tSplitter.SetSashPosition(400) - - self.vSplitter.SplitVertically(self.selPanel, self.tSplitter) - self.vSplitter.SetMinimumPaneSize(SIDE_COL[0]) - self.tSplitter.SetSashPosition(SIDE_COL[0]) - - #self.nb.AddPage(self.vSplitter, "Plot") - #self.nb.SendSizeEvent() - - sizer = self.MainPanel.GetSizer() - sizer.Add(self.vSplitter, 1, flag=wx.EXPAND,border=0) - self.MainPanel.SetSizer(sizer) - self.FrameSizer.Layout() - - self.Bind(wx.EVT_COMBOBOX, self.onColSelectionChange, self.selPanel.colPanel1.comboX ) - self.Bind(wx.EVT_LISTBOX , self.onColSelectionChange, self.selPanel.colPanel1.lbColumns) - self.Bind(wx.EVT_COMBOBOX, self.onColSelectionChange, self.selPanel.colPanel2.comboX ) - self.Bind(wx.EVT_LISTBOX , self.onColSelectionChange, self.selPanel.colPanel2.lbColumns) - self.Bind(wx.EVT_COMBOBOX, self.onColSelectionChange, self.selPanel.colPanel3.comboX ) - self.Bind(wx.EVT_LISTBOX , self.onColSelectionChange, self.selPanel.colPanel3.lbColumns) - self.Bind(wx.EVT_LISTBOX , self.onTabSelectionChange, self.selPanel.tabPanel.lbTab) - self.Bind(wx.EVT_SPLITTER_SASH_POS_CHANGED, self.onSashChangeMain, self.vSplitter) - - self.selPanel.tabPanel.lbTab.Bind(wx.EVT_RIGHT_DOWN, self.OnTabPopup) - - # plot trigger - if bPlot: - self.mainFrameUpdateLayout() - self.onColSelectionChange(event=None) - try: - self.Thaw() - except: - pass - # Hack - #self.onShowTool(tool='Resample') - - def setStatusBar(self, ISel=None): - nTabs=self.tabList.len() - if ISel is None: - ISel = list(np.arange(nTabs)) - if nTabs<0: - self.statusbar.SetStatusText('', 0) # Format - self.statusbar.SetStatusText('', 1) # Filenames - self.statusbar.SetStatusText('', 2) # Shape - elif nTabs==1: - self.statusbar.SetStatusText(self.tabList.get(0).fileformat, 0) - self.statusbar.SetStatusText(self.tabList.get(0).filename , 1) - self.statusbar.SetStatusText(self.tabList.get(0).shapestring, 2) - elif len(ISel)==1: - self.statusbar.SetStatusText(self.tabList.get(ISel[0]).fileformat , 0) - self.statusbar.SetStatusText(self.tabList.get(ISel[0]).filename , 1) - self.statusbar.SetStatusText(self.tabList.get(ISel[0]).shapestring, 2) - else: - self.statusbar.SetStatusText('' ,0) - self.statusbar.SetStatusText(", 
".join(list(set([self.tabList.filenames[i] for i in ISel]))),1) - self.statusbar.SetStatusText('',2) - - def renameTable(self, iTab, newName): - oldName = self.tabList.renameTable(iTab, newName) - self.selPanel.renameTable(iTab, oldName, newName) - - def sortTabs(self, method='byName'): - self.tabList.sort(method=method) - # Updating tables - self.selPanel.update_tabs(self.tabList) - # Trigger a replot - self.onTabSelectionChange() - - - def deleteTabs(self, I): - self.tabList.deleteTabs(I) - - # Invalidating selections - self.selPanel.tabPanel.lbTab.SetSelection(-1) - # Until we have something better, we empty plot - self.plotPanel.empty() - self.infoPanel.empty() - self.selPanel.clean_memory() - # Updating tables - self.selPanel.update_tabs(self.tabList) - # Trigger a replot - self.onTabSelectionChange() - - def exportTab(self, iTab): - tab=self.tabList.get(iTab) - default_filename=tab.basename +'.csv' - with wx.FileDialog(self, "Save to CSV file",defaultFile=default_filename, - style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT) as dlg: - #, wildcard="CSV files (*.csv)|*.csv", - dlg.CentreOnParent() - if dlg.ShowModal() == wx.ID_CANCEL: - return # the user changed their mind - tab.export(dlg.GetPath()) - - def onShowTool(self, event=None, tool=''): - """ - tool in 'Outlier', 'Filter', 'LogDec','FASTRadialAverage', 'Mask', 'CurveFitting' - """ - if not hasattr(self,'plotPanel'): - Error(self,'Plot some data first') - return - self.plotPanel.showTool(tool) - - def onSashChangeMain(self,event=None): - pass - # doent work because size is not communicated yet - #if hasattr(self,'selPanel'): - # print('ON SASH') - # self.selPanel.setEquiSash(event) - - def OnTabPopup(self,event): - menu = TablePopup(self,self.selPanel.tabPanel.lbTab) - self.PopupMenu(menu, event.GetPosition()) - menu.Destroy() - - def onTabSelectionChange(self,event=None): - # TODO This can be cleaned-up - ISel=self.selPanel.tabPanel.lbTab.GetSelections() - if len(ISel)>0: - # Letting seletion panel handle the change - self.selPanel.tabSelectionChanged() - # Update of status bar - self.setStatusBar(ISel) - # Trigger the colSelection Event - self.onColSelectionChange(event=None) - - def onColSelectionChange(self,event=None): - if hasattr(self,'plotPanel'): - # Letting selection panel handle the change - self.selPanel.colSelectionChanged() - # Redrawing - self.plotPanel.load_and_draw() - # --- Stats trigger - #self.showStats() - - def redraw(self): - if hasattr(self,'plotPanel'): - self.plotPanel.load_and_draw() -# def showStats(self): -# self.infoPanel.showStats(self.plotPanel.plotData,self.plotPanel.pltTypePanel.plotType()) - - def onExit(self, event): - self.Close() - - def cleanGUI(self, event=None): - if hasattr(self,'plotPanel'): - del self.plotPanel - if hasattr(self,'selPanel'): - del self.selPanel - if hasattr(self,'infoPanel'): - del self.infoPanel - #self.deletePages() - try: - self.MainPanel.GetSizer().Clear(delete_windows=True) # Delete Windows - except: - self.MainPanel.GetSizer().Clear() - self.FrameSizer.Layout() - gc.collect() - - def onSave(self, event=None): - # using the navigation toolbar save functionality - self.plotPanel.navTB.save_figure() - - def onAbout(self, event=None): - Info(self,PROG_NAME+' '+PROG_VERSION+'\n\nVisit http://github.com/ebranlard/pyDatView for documentation.') - - def onReload(self, event=None): - filenames = self.tabList.unique_filenames - filenames.sort() - if len(filenames)>0: - # Save formulas to restore them after reload with sorted tabs - _ITab, _STab = self.selPanel.getAllTables() - 
ITab = [iTab for __, iTab in sorted(zip(_STab, _ITab))] - self.restore_formulas = [] - for iTab in ITab: - f = self.tabList.get(iTab).formulas - f = sorted(f, key=lambda k: k['pos']) - self.restore_formulas.append(f) - iFormat=self.comboFormats.GetSelection() - if iFormat==0: # auto-format - Format = None - else: - Format = FILE_FORMATS[iFormat-1] - self.load_files(filenames,fileformat=Format,bReload=True,bAdd=False) - else: - Error(self,'Open one or more file first.') - - def onDEBUG(self, event=None): - #self.clean_memory() - self.plotPanel.ctrlPanel.Refresh() - self.plotPanel.cb_sizer.ForceRefresh() - - def onExport(self, event=None): - ISel=[] - try: - ISel = self.selPanel.tabPanel.lbTab.GetSelections() - except: - pass - if len(ISel)>0: - self.exportTab(ISel[0]) - else: - Error(self,'Open a file and select a table first.') - - def onLoad(self, event=None): - self.selectFile(bAdd=False) - - def onAdd(self, event=None): - self.selectFile(bAdd=self.tabList.len()>0) - - def selectFile(self,bAdd=False): - # --- File Format extension - iFormat=self.comboFormats.GetSelection() - sFormat=self.comboFormats.GetStringSelection() - if iFormat==0: # auto-format - Format = None - #wildcard = 'all (*.*)|*.*' - wildcard='|'.join([n+'|*'+';*'.join(e) for n,e in zip(FILE_FORMATS_NAMEXT,FILE_FORMATS_EXTENSIONS)]) - #wildcard = sFormat + extensions+'|all (*.*)|*.*' - else: - Format = FILE_FORMATS[iFormat-1] - extensions = '|*'+';*'.join(FILE_FORMATS[iFormat-1].extensions) - wildcard = sFormat + extensions+'|all (*.*)|*.*' - - with wx.FileDialog(self, "Open file", wildcard=wildcard, - style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST | wx.FD_MULTIPLE) as dlg: - #other options: wx.CHANGE_DIR - #dlg.SetSize((100,100)) - #dlg.Center() - if dlg.ShowModal() == wx.ID_CANCEL: - return # the user changed their mind - self.load_files(dlg.GetPaths(),fileformat=Format,bAdd=bAdd) - - def onModeChange(self, event=None): - if hasattr(self,'selPanel'): - self.selPanel.updateLayout(SEL_MODES_ID[self.comboMode.GetSelection()]) - self.mainFrameUpdateLayout() - # --- Trigger to check number of columns - self.onTabSelectionChange() - - def mainFrameUpdateLayout(self, event=None): - if hasattr(self,'selPanel'): - nWind=self.selPanel.splitter.nWindows - if self.Size[0]<=800: - sash=SIDE_COL[nWind] - else: - sash=SIDE_COL_LARGE[nWind] - self.resizeSideColumn(sash) - - def OnResizeWindow(self, event): - try: - self.mainFrameUpdateLayout() - self.Layout() - except: - pass - # NOTE: doesn't work... - #if hasattr(self,'plotPanel'): - # Subplot spacing changes based on figure size - #print('>>> RESIZE WINDOW') - #self.redraw() - - # --- Side column - def resizeSideColumn(self,width): - # To force the replot we do an epic unsplit/split... - #self.vSplitter.Unsplit() - #self.vSplitter.SplitVertically(self.selPanel, self.tSplitter) - self.vSplitter.SetMinimumPaneSize(width) - self.vSplitter.SetSashPosition(width) - #self.selPanel.splitter.setEquiSash() - - # --- NOTEBOOK - #def deletePages(self): - # for index in reversed(range(self.nb.GetPageCount())): - # self.nb.DeletePage(index) - # self.nb.SendSizeEvent() - # gc.collect() - #def on_tab_change(self, event=None): - # page_to_select = event.GetSelection() - # wx.CallAfter(self.fix_focus, page_to_select) - # event.Skip(True) - #def fix_focus(self, page_to_select): - # page = self.nb.GetPage(page_to_select) - # page.SetFocus() - -#---------------------------------------------------------------------- -def MyExceptionHook(etype, value, trace): - """ - Handler for all unhandled exceptions. 
- :param `etype`: the exception type (`SyntaxError`, `ZeroDivisionError`, etc...); - :type `etype`: `Exception` - :param string `value`: the exception error message; - :param string `trace`: the traceback header, if any (otherwise, it prints the - standard Python header: ``Traceback (most recent call last)``. - """ - from wx._core import wxAssertionError - # Printing exception - traceback.print_exception(etype, value, trace) - if etype==wxAssertionError: - if wx.Platform == '__WXMAC__': - # We skip these exceptions on macos (likely bitmap size 0) - return - # Then showing to user the last error - frame = wx.GetApp().GetTopWindow() - tmp = traceback.format_exception(etype, value, trace) - if tmp[-1].find('Exception: Error:')==0: - Error(frame,tmp[-1][18:]) - elif tmp[-1].find('Exception: Warn:')==0: - Warn(frame,tmp[-1][17:]) - else: - exception = 'The following exception occured:\n\n'+ tmp[-1] + '\n'+tmp[-2].strip() - Error(frame,exception) - try: - frame.Thaw() # Make sure any freeze event is stopped - except: - pass - -# --------------------------------------------------------------------------------} -# --- Tests -# --------------------------------------------------------------------------------{ -def test(filenames=None): - if filenames is not None: - app = wx.App(False) - frame = MainFrame() - frame.load_files(filenames,fileformat=None) - return - -# --------------------------------------------------------------------------------} -# --- Wrapped WxApp -# --------------------------------------------------------------------------------{ -class MyWxApp(wx.App): - def __init__(self, redirect=False, filename=None): - try: - wx.App.__init__(self, redirect, filename) - except: - if wx.Platform == '__WXMAC__': - #msg = """This program needs access to the screen. - # Please run with 'pythonw', not 'python', and only when you are logged - # in on the main display of your Mac.""" - msg= """ -MacOS Error: - This program needs access to the screen. Please run with a - Framework build of python, and only when you are logged in - on the main display of your Mac. - -pyDatView help: - You see the error above because you are using a Mac and - the python executable you are using does not have access to - your screen. This is a Mac issue, not a pyDatView issue. - Instead of calling 'python pyDatView.py', you need to find - another python and do '/path/python pyDatView.py' - You can try './pythonmac pyDatView.py', a script provided - in this repository to detect the path (in some cases) - - You can find additional help in the file 'README.md'. - - For quick reference, here are some typical cases: - - Your python was installed with 'brew', then likely use - /usr/lib/Cellar/python/XXXXX/Frameworks/python.framework/Versions/XXXX/bin/pythonXXX; - - Your python is an anaconda python, use something like:; - /anaconda3/bin/python.app (NOTE: the '.app'! - - You are using a python 2 version, you can use the system one: - /Library/Frameworks/Python.framework/Versions/XXX/bin/pythonXXX - /System/Library/Frameworks/Python.framework/Versions/XXX/bin/pythonXXX -""" - - elif wx.Platform == '__WXGTK__': - msg =""" -Error: - Unable to access the X Display, is $DISPLAY set properly? - -pyDatView help: - You are probably running this application on a server accessed via ssh. - Use `ssh -X` or `ssh -Y` to access the server. - Else, try setting up $DISPLAY before doing the ssh connection. -""" - else: - msg = 'Unable to create GUI' # TODO: more description is needed for wxMSW... 
- raise SystemExit(msg) - -# --------------------------------------------------------------------------------} -# --- Mains -# --------------------------------------------------------------------------------{ -def showApp(firstArg=None,dataframe=None,filenames=[]): - """ - The main function to start the data frame GUI. - """ - app = MyWxApp(False) - frame = MainFrame() - # Optional first argument - if firstArg is not None: - if isinstance(firstArg,list): - filenames=firstArg - elif isinstance(firstArg,str): - filenames=[firstArg] - elif isinstance(firstArg, pd.DataFrame): - dataframe=firstArg - # - if (dataframe is not None) and (len(dataframe)>0): - #import time - #tstart = time.time() - frame.load_df(dataframe) - #tend = time.time() - #print('PydatView time: ',tend-tstart) - elif len(filenames)>0: - frame.load_files(filenames,fileformat=None) - app.MainLoop() - -def cmdline(): - if len(sys.argv)>1: - pydatview(filename=sys.argv[1]) - else: - pydatview() +from __future__ import division, unicode_literals, print_function, absolute_import +from builtins import map, range, chr, str +from io import open +from future import standard_library +standard_library.install_aliases() + +import numpy as np +import os.path +try: + import pandas as pd +except: + print('') + print('') + print('Error: problem loading pandas package:') + print(' - Check if this package is installed ( e.g. type: `pip install pandas`)') + print(' - If you are using anaconda, try `conda update python.app`') + print(' - If none of the above work, contact the developer.') + print('') + print('') + sys.exit(-1) + #raise + +import sys +import traceback +import gc + +# GUI +import wx +from .GUIPlotPanel import PlotPanel +from .GUISelectionPanel import SelectionPanel,SEL_MODES,SEL_MODES_ID +from .GUISelectionPanel import ColumnPopup,TablePopup +from .GUIInfoPanel import InfoPanel +from .GUIToolBox import GetKeyString, TBAddTool +from .Tables import TableList, Table +# Helper +from .common import * +from .GUICommon import * + + + +# --------------------------------------------------------------------------------} +# --- GLOBAL +# --------------------------------------------------------------------------------{ +PROG_NAME='pyDatView' +PROG_VERSION='v0.2-local' +try: + import weio # File Formats and File Readers + FILE_FORMATS= weio.fileFormats() +except: + print('') + print('Error: the python package `weio` was not imported successfully.\n') + print('Most likely the submodule `weio` was not cloned with `pyDatView`') + print('Type the following command to retrieve it:\n') + print(' git submodule update --init --recursive\n') + print('Alternatively re-clone this repository into a separate folder:\n') + print(' git clone --recurse-submodules https://github.com/ebranlard/pyDatView\n') + sys.exit(-1) +FILE_FORMATS_EXTENSIONS = [['.*']]+[f.extensions for f in FILE_FORMATS] +FILE_FORMATS_NAMES = ['auto (any supported file)'] + [f.name for f in FILE_FORMATS] +FILE_FORMATS_NAMEXT =['{} ({})'.format(n,','.join(e)) for n,e in zip(FILE_FORMATS_NAMES,FILE_FORMATS_EXTENSIONS)] + +SIDE_COL = [160,160,300,420,530] +SIDE_COL_LARGE = [200,200,360,480,600] +BOT_PANL =85 + +#matplotlib.rcParams['text.usetex'] = False +# matplotlib.rcParams['font.sans-serif'] = 'DejaVu Sans' +#matplotlib.rcParams['font.family'] = 'Arial' +#matplotlib.rcParams['font.sans-serif'] = 'Arial' +# matplotlib.rcParams['font.family'] = 'sans-serif' + + + + + +# --------------------------------------------------------------------------------} +# --- Drag and drop +# 
--------------------------------------------------------------------------------{ +# Implement File Drop Target class +class FileDropTarget(wx.FileDropTarget): + def __init__(self, parent): + wx.FileDropTarget.__init__(self) + self.parent = parent + def OnDropFiles(self, x, y, filenames): + filenames = [f for f in filenames if not os.path.isdir(f)] + filenames.sort() + if len(filenames)>0: + # If Ctrl is pressed we add + bAdd= wx.GetKeyState(wx.WXK_CONTROL); + iFormat=self.parent.comboFormats.GetSelection() + if iFormat==0: # auto-format + Format = None + else: + Format = FILE_FORMATS[iFormat-1] + self.parent.load_files(filenames, fileformats=[Format]*len(filenames), bAdd=bAdd) + return True + + + + +# --------------------------------------------------------------------------------} +# --- Main Frame +# --------------------------------------------------------------------------------{ +class MainFrame(wx.Frame): + def __init__(self, filename=None): + # Parent constructor + wx.Frame.__init__(self, None, -1, PROG_NAME+' '+PROG_VERSION) + # Data + self.tabList=TableList() + self.restore_formulas = [] + + # Hooking exceptions to display them to the user + sys.excepthook = MyExceptionHook + # --- GUI + #font = self.GetFont() + #print(font.GetFamily(),font.GetStyle(),font.GetPointSize()) + #font.SetFamily(wx.FONTFAMILY_DEFAULT) + #font.SetFamily(wx.FONTFAMILY_MODERN) + #font.SetFamily(wx.FONTFAMILY_SWISS) + #font.SetPointSize(8) + #print(font.GetFamily(),font.GetStyle(),font.GetPointSize()) + #self.SetFont(font) + # --- Menu + menuBar = wx.MenuBar() + + fileMenu = wx.Menu() + loadMenuItem = fileMenu.Append(wx.ID_NEW,"Open file" ,"Open file" ) + exptMenuItem = fileMenu.Append(-1 ,"Export table" ,"Export table" ) + saveMenuItem = fileMenu.Append(wx.ID_SAVE,"Save figure" ,"Save figure" ) + exitMenuItem = fileMenu.Append(wx.ID_EXIT, 'Quit', 'Quit application') + menuBar.Append(fileMenu, "&File") + self.Bind(wx.EVT_MENU,self.onExit ,exitMenuItem) + self.Bind(wx.EVT_MENU,self.onLoad ,loadMenuItem) + self.Bind(wx.EVT_MENU,self.onExport,exptMenuItem) + self.Bind(wx.EVT_MENU,self.onSave ,saveMenuItem) + + dataMenu = wx.Menu() + menuBar.Append(dataMenu, "&Data") + self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e, 'Mask') , dataMenu.Append(wx.ID_ANY, 'Mask')) + self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'Outlier'), dataMenu.Append(wx.ID_ANY, 'Outliers removal')) + self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'Filter') , dataMenu.Append(wx.ID_ANY, 'Filter')) + self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'Resample') , dataMenu.Append(wx.ID_ANY, 'Resample')) + self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'FASTRadialAverage'), dataMenu.Append(wx.ID_ANY, 'FAST - Radial average')) + + toolMenu = wx.Menu() + menuBar.Append(toolMenu, "&Tools") + self.Bind(wx.EVT_MENU,lambda e: self.onShowTool(e, 'CurveFitting'), toolMenu.Append(wx.ID_ANY, 'Curve fitting')) + self.Bind(wx.EVT_MENU,lambda e: self.onShowTool(e, 'LogDec') , toolMenu.Append(wx.ID_ANY, 'Damping from decay')) + + helpMenu = wx.Menu() + aboutMenuItem = helpMenu.Append(wx.NewId(), 'About', 'About') + menuBar.Append(helpMenu, "&Help") + self.SetMenuBar(menuBar) + self.Bind(wx.EVT_MENU,self.onAbout,aboutMenuItem) + + # --- ToolBar + tb = self.CreateToolBar(wx.TB_HORIZONTAL|wx.TB_TEXT|wx.TB_HORZ_LAYOUT) + self.toolBar = tb + self.comboFormats = wx.ComboBox(tb, choices = FILE_FORMATS_NAMEXT, style=wx.CB_READONLY) + self.comboFormats.SetSelection(0) + self.comboMode = wx.ComboBox(tb, choices = SEL_MODES, style=wx.CB_READONLY) + 
self.comboMode.SetSelection(0) + self.Bind(wx.EVT_COMBOBOX, self.onModeChange, self.comboMode ) + tb.AddSeparator() + tb.AddControl( wx.StaticText(tb, -1, 'Mode: ' ) ) + tb.AddControl( self.comboMode ) + tb.AddStretchableSpace() + tb.AddControl( wx.StaticText(tb, -1, 'Format: ' ) ) + tb.AddControl(self.comboFormats ) + tb.AddSeparator() + #bmp = wx.Bitmap('help.png') #wx.Bitmap("NEW.BMP", wx.BITMAP_TYPE_BMP) + TBAddTool(tb,"Open" ,wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN),self.onLoad) + TBAddTool(tb,"Reload",wx.ArtProvider.GetBitmap(wx.ART_REDO),self.onReload) + try: + TBAddTool(tb,"Add" ,wx.ArtProvider.GetBitmap(wx.ART_PLUS),self.onAdd) + except: + TBAddTool(tb,"Add" ,wx.ArtProvider.GetBitmap(wx.FILE_OPEN),self.onAdd) + #self.AddTBBitmapTool(tb,"Debug" ,wx.ArtProvider.GetBitmap(wx.ART_ERROR),self.onDEBUG) + tb.AddStretchableSpace() + tb.Realize() + + # --- Status bar + self.statusbar=self.CreateStatusBar(3, style=0) + self.statusbar.SetStatusWidths([200, -1, 70]) + + # --- Main Panel and Notebook + self.MainPanel = wx.Panel(self) + #self.MainPanel = wx.Panel(self, style=wx.RAISED_BORDER) + #self.MainPanel.SetBackgroundColour((200,0,0)) + + #self.nb = wx.Notebook(self.MainPanel) + #self.nb.Bind(wx.EVT_NOTEBOOK_PAGE_CHANGED, self.on_tab_change) + + + sizer = wx.BoxSizer() + #sizer.Add(self.nb, 1, flag=wx.EXPAND) + self.MainPanel.SetSizer(sizer) + + # --- Drag and drop + dd = FileDropTarget(self) + self.SetDropTarget(dd) + + # --- Main Frame (self) + self.FrameSizer = wx.BoxSizer(wx.VERTICAL) + slSep = wx.StaticLine(self, -1, size=wx.Size(-1,1), style=wx.LI_HORIZONTAL) + self.FrameSizer.Add(slSep ,0, flag=wx.EXPAND|wx.BOTTOM,border=0) + self.FrameSizer.Add(self.MainPanel,1, flag=wx.EXPAND,border=0) + self.SetSizer(self.FrameSizer) + + self.SetSize((900, 700)) + self.Center() + self.Show() + self.Bind(wx.EVT_SIZE, self.OnResizeWindow) + + # Shortcuts + idFilter=wx.NewId() + self.Bind(wx.EVT_MENU, self.onFilter, id=idFilter) + + accel_tbl = wx.AcceleratorTable( + [(wx.ACCEL_CTRL, ord('F'), idFilter )] + ) + self.SetAcceleratorTable(accel_tbl) + + + def onFilter(self,event): + if hasattr(self,'selPanel'): + self.selPanel.colPanel1.tFilter.SetFocus() + event.Skip() + + + + def clean_memory(self,bReload=False): + #print('Clean memory') + # force Memory cleanup + self.tabList.clean() + if not bReload: + if hasattr(self,'selPanel'): + self.selPanel.clean_memory() + if hasattr(self,'infoPanel'): + self.infoPanel.clean() + if hasattr(self,'plotPanel'): + self.plotPanel.cleanPlot() + gc.collect() + + def load_files(self, filenames=[], fileformats=None, bReload=False, bAdd=False): + """ load multiple files, only trigger the plot at the end """ + if bReload: + if hasattr(self,'selPanel'): + self.selPanel.saveSelection() # TODO move to tables + + if not bAdd: + self.clean_memory(bReload=bReload) + + + if fileformats is None: + fileformats=[None]*len(filenames) + assert type(fileformats)==list, 'fileformats must be a list' + assert len(fileformats)==len(filenames), 'fileformats and filenames must have the same lengths' + + # Sorting files in alphabetical order in base_filenames order + base_filenames = [os.path.basename(f) for f in filenames] + I = np.argsort(base_filenames) + filenames = list(np.array(filenames)[I]) + fileformats = list(np.array(fileformats)[I]) + #filenames = [f for __, f in sorted(zip(base_filenames, filenames))] + + # Load the tables + warnList = self.tabList.load_tables_from_files(filenames=filenames, fileformats=fileformats, bAdd=bAdd) + if bReload: + # Restore formulas that were 
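# --- Editor's sketch (illustrative only, not part of the patch): the sorting step used in
# --- load_files above, which keeps fileformats aligned with filenames when sorting by
# --- basename. Paths and format names below are hypothetical.
import os
import numpy as np
filenames   = ['/run/b_sim2.outb', '/run/a_sim1.csv']
fileformats = ['FAST-outb', 'CSV']
I = np.argsort([os.path.basename(f) for f in filenames])
filenames   = list(np.array(filenames)[I])
fileformats = list(np.array(fileformats)[I])
print(filenames)      # ['/run/a_sim1.csv', '/run/b_sim2.outb']
print(fileformats)    # ['CSV', 'FAST-outb']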
previously added + for tab in self.tabList: + if tab.raw_name in self.restore_formulas.keys(): + for f in self.restore_formulas[tab.raw_name]: + tab.addColumnByFormula(f['name'], f['formula'], f['pos']-1) + self.restore_formulas = {} + # Display warnings + for warn in warnList: + Warn(self,warn) + # Load tables into the GUI + if self.tabList.len()>0: + self.load_tabs_into_GUI(bReload=bReload, bAdd=bAdd, bPlot=True) + + def load_df(self, df, name=None, bAdd=False, bPlot=True): + if bAdd: + self.tabList.append(Table(data=df, name=name)) + else: + self.tabList = TableList( [Table(data=df, name=name)] ) + self.load_tabs_into_GUI(bAdd=bAdd, bPlot=bPlot) + if hasattr(self,'selPanel'): + self.selPanel.updateLayout(SEL_MODES_ID[self.comboMode.GetSelection()]) + + def load_dfs(self, dfs, names, bAdd=False): + self.tabList.from_dataframes(dataframes=dfs, names=names, bAdd=bAdd) + self.load_tabs_into_GUI(bAdd=bAdd, bPlot=True) + if hasattr(self,'selPanel'): + self.selPanel.updateLayout(SEL_MODES_ID[self.comboMode.GetSelection()]) + + def load_tabs_into_GUI(self, bReload=False, bAdd=False, bPlot=True): + if bAdd: + if not hasattr(self,'selPanel'): + bAdd=False + + if (not bReload) and (not bAdd): + self.cleanGUI() + self.Freeze() + # Setting status bar + self.setStatusBar() + + if bReload or bAdd: + self.selPanel.update_tabs(self.tabList) + else: + mode = SEL_MODES_ID[self.comboMode.GetSelection()] + #self.vSplitter = wx.SplitterWindow(self.nb) + self.vSplitter = wx.SplitterWindow(self.MainPanel) + self.selPanel = SelectionPanel(self.vSplitter, self.tabList, mode=mode, mainframe=self) + self.tSplitter = wx.SplitterWindow(self.vSplitter) + #self.tSplitter.SetMinimumPaneSize(20) + self.infoPanel = InfoPanel(self.tSplitter) + self.plotPanel = PlotPanel(self.tSplitter, self.selPanel, self.infoPanel, self) + self.tSplitter.SetSashGravity(0.9) + self.tSplitter.SplitHorizontally(self.plotPanel, self.infoPanel) + self.tSplitter.SetMinimumPaneSize(BOT_PANL) + self.tSplitter.SetSashGravity(1) + self.tSplitter.SetSashPosition(400) + + self.vSplitter.SplitVertically(self.selPanel, self.tSplitter) + self.vSplitter.SetMinimumPaneSize(SIDE_COL[0]) + self.tSplitter.SetSashPosition(SIDE_COL[0]) + + #self.nb.AddPage(self.vSplitter, "Plot") + #self.nb.SendSizeEvent() + + sizer = self.MainPanel.GetSizer() + sizer.Add(self.vSplitter, 1, flag=wx.EXPAND,border=0) + self.MainPanel.SetSizer(sizer) + self.FrameSizer.Layout() + + self.Bind(wx.EVT_COMBOBOX, self.onColSelectionChange, self.selPanel.colPanel1.comboX ) + self.Bind(wx.EVT_LISTBOX , self.onColSelectionChange, self.selPanel.colPanel1.lbColumns) + self.Bind(wx.EVT_COMBOBOX, self.onColSelectionChange, self.selPanel.colPanel2.comboX ) + self.Bind(wx.EVT_LISTBOX , self.onColSelectionChange, self.selPanel.colPanel2.lbColumns) + self.Bind(wx.EVT_COMBOBOX, self.onColSelectionChange, self.selPanel.colPanel3.comboX ) + self.Bind(wx.EVT_LISTBOX , self.onColSelectionChange, self.selPanel.colPanel3.lbColumns) + self.Bind(wx.EVT_LISTBOX , self.onTabSelectionChange, self.selPanel.tabPanel.lbTab) + self.Bind(wx.EVT_SPLITTER_SASH_POS_CHANGED, self.onSashChangeMain, self.vSplitter) + + self.selPanel.tabPanel.lbTab.Bind(wx.EVT_RIGHT_DOWN, self.OnTabPopup) + + # plot trigger + if bPlot: + self.mainFrameUpdateLayout() + self.onColSelectionChange(event=None) + try: + self.Thaw() + except: + pass + # Hack + #self.onShowTool(tool='Resample') + + def setStatusBar(self, ISel=None): + nTabs=self.tabList.len() + if ISel is None: + ISel = list(np.arange(nTabs)) + if nTabs<0: + 
self.statusbar.SetStatusText('', 0) # Format + self.statusbar.SetStatusText('', 1) # Filenames + self.statusbar.SetStatusText('', 2) # Shape + elif nTabs==1: + self.statusbar.SetStatusText(self.tabList.get(0).fileformat.name, 0) + self.statusbar.SetStatusText(self.tabList.get(0).filename , 1) + self.statusbar.SetStatusText(self.tabList.get(0).shapestring, 2) + elif len(ISel)==1: + self.statusbar.SetStatusText(self.tabList.get(ISel[0]).fileformat.name , 0) + self.statusbar.SetStatusText(self.tabList.get(ISel[0]).filename , 1) + self.statusbar.SetStatusText(self.tabList.get(ISel[0]).shapestring, 2) + else: + self.statusbar.SetStatusText('' ,0) + self.statusbar.SetStatusText(", ".join(list(set([self.tabList.filenames[i] for i in ISel]))),1) + self.statusbar.SetStatusText('',2) + + def renameTable(self, iTab, newName): + oldName = self.tabList.renameTable(iTab, newName) + self.selPanel.renameTable(iTab, oldName, newName) + + def sortTabs(self, method='byName'): + self.tabList.sort(method=method) + # Updating tables + self.selPanel.update_tabs(self.tabList) + # Trigger a replot + self.onTabSelectionChange() + + + def deleteTabs(self, I): + self.tabList.deleteTabs(I) + + # Invalidating selections + self.selPanel.tabPanel.lbTab.SetSelection(-1) + # Until we have something better, we empty plot + self.plotPanel.empty() + self.infoPanel.empty() + self.selPanel.clean_memory() + # Updating tables + self.selPanel.update_tabs(self.tabList) + # Trigger a replot + self.onTabSelectionChange() + + def exportTab(self, iTab): + tab=self.tabList.get(iTab) + default_filename=tab.basename +'.csv' + with wx.FileDialog(self, "Save to CSV file",defaultFile=default_filename, + style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT) as dlg: + #, wildcard="CSV files (*.csv)|*.csv", + dlg.CentreOnParent() + if dlg.ShowModal() == wx.ID_CANCEL: + return # the user changed their mind + tab.export(dlg.GetPath()) + + def onShowTool(self, event=None, tool=''): + """ + tool in 'Outlier', 'Filter', 'LogDec','FASTRadialAverage', 'Mask', 'CurveFitting' + """ + if not hasattr(self,'plotPanel'): + Error(self,'Plot some data first') + return + self.plotPanel.showTool(tool) + + def onSashChangeMain(self,event=None): + pass + # doent work because size is not communicated yet + #if hasattr(self,'selPanel'): + # print('ON SASH') + # self.selPanel.setEquiSash(event) + + def OnTabPopup(self,event): + menu = TablePopup(self,self.selPanel.tabPanel.lbTab) + self.PopupMenu(menu, event.GetPosition()) + menu.Destroy() + + def onTabSelectionChange(self,event=None): + # TODO This can be cleaned-up + ISel=self.selPanel.tabPanel.lbTab.GetSelections() + if len(ISel)>0: + # Letting seletion panel handle the change + self.selPanel.tabSelectionChanged() + # Update of status bar + self.setStatusBar(ISel) + # Trigger the colSelection Event + self.onColSelectionChange(event=None) + + def onColSelectionChange(self,event=None): + if hasattr(self,'plotPanel'): + # Letting selection panel handle the change + self.selPanel.colSelectionChanged() + # Redrawing + self.plotPanel.load_and_draw() + # --- Stats trigger + #self.showStats() + + def redraw(self): + if hasattr(self,'plotPanel'): + self.plotPanel.load_and_draw() +# def showStats(self): +# self.infoPanel.showStats(self.plotPanel.plotData,self.plotPanel.pltTypePanel.plotType()) + + def onExit(self, event): + self.Close() + + def cleanGUI(self, event=None): + if hasattr(self,'plotPanel'): + del self.plotPanel + if hasattr(self,'selPanel'): + del self.selPanel + if hasattr(self,'infoPanel'): + del self.infoPanel + 
#self.deletePages() + try: + self.MainPanel.GetSizer().Clear(delete_windows=True) # Delete Windows + except: + self.MainPanel.GetSizer().Clear() + self.FrameSizer.Layout() + gc.collect() + + def onSave(self, event=None): + # using the navigation toolbar save functionality + self.plotPanel.navTB.save_figure() + + def onAbout(self, event=None): + Info(self,PROG_NAME+' '+PROG_VERSION+'\n\nVisit http://github.com/ebranlard/pyDatView for documentation.') + + def onReload(self, event=None): + filenames, fileformats = self.tabList.filenames_and_formats + if len(filenames)>0: + # Save formulas to restore them after reload with sorted tabs + self.restore_formulas = {} + for tab in self.tabList._tabs: + f = tab.formulas # list of dict('pos','formula','name') + f = sorted(f, key=lambda k: k['pos']) # Sort formulae by position in list of formua + self.restore_formulas[tab.raw_name]=f # we use raw_name as key + # Actually load files (read and add in GUI) + self.load_files(filenames, fileformats=fileformats, bReload=True,bAdd=False) + else: + Error(self,'Open one or more file first.') + + def onDEBUG(self, event=None): + #self.clean_memory() + self.plotPanel.ctrlPanel.Refresh() + self.plotPanel.cb_sizer.ForceRefresh() + + def onExport(self, event=None): + ISel=[] + try: + ISel = self.selPanel.tabPanel.lbTab.GetSelections() + except: + pass + if len(ISel)>0: + self.exportTab(ISel[0]) + else: + Error(self,'Open a file and select a table first.') + + def onLoad(self, event=None): + self.selectFile(bAdd=False) + + def onAdd(self, event=None): + self.selectFile(bAdd=self.tabList.len()>0) + + def selectFile(self,bAdd=False): + # --- File Format extension + iFormat=self.comboFormats.GetSelection() + sFormat=self.comboFormats.GetStringSelection() + if iFormat==0: # auto-format + Format = None + #wildcard = 'all (*.*)|*.*' + wildcard='|'.join([n+'|*'+';*'.join(e) for n,e in zip(FILE_FORMATS_NAMEXT,FILE_FORMATS_EXTENSIONS)]) + #wildcard = sFormat + extensions+'|all (*.*)|*.*' + else: + Format = FILE_FORMATS[iFormat-1] + extensions = '|*'+';*'.join(FILE_FORMATS[iFormat-1].extensions) + wildcard = sFormat + extensions+'|all (*.*)|*.*' + + with wx.FileDialog(self, "Open file", wildcard=wildcard, + style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST | wx.FD_MULTIPLE) as dlg: + #other options: wx.CHANGE_DIR + #dlg.SetSize((100,100)) + #dlg.Center() + if dlg.ShowModal() == wx.ID_CANCEL: + return # the user changed their mind + self.load_files(dlg.GetPaths(),fileformat=Format,bAdd=bAdd) + + def onModeChange(self, event=None): + if hasattr(self,'selPanel'): + self.selPanel.updateLayout(SEL_MODES_ID[self.comboMode.GetSelection()]) + self.mainFrameUpdateLayout() + # --- Trigger to check number of columns + self.onTabSelectionChange() + + def mainFrameUpdateLayout(self, event=None): + if hasattr(self,'selPanel'): + nWind=self.selPanel.splitter.nWindows + if self.Size[0]<=800: + sash=SIDE_COL[nWind] + else: + sash=SIDE_COL_LARGE[nWind] + self.resizeSideColumn(sash) + + def OnResizeWindow(self, event): + try: + self.mainFrameUpdateLayout() + self.Layout() + except: + pass + # NOTE: doesn't work... + #if hasattr(self,'plotPanel'): + # Subplot spacing changes based on figure size + #print('>>> RESIZE WINDOW') + #self.redraw() + + # --- Side column + def resizeSideColumn(self,width): + # To force the replot we do an epic unsplit/split... 
+ #self.vSplitter.Unsplit() + #self.vSplitter.SplitVertically(self.selPanel, self.tSplitter) + self.vSplitter.SetMinimumPaneSize(width) + self.vSplitter.SetSashPosition(width) + #self.selPanel.splitter.setEquiSash() + + # --- NOTEBOOK + #def deletePages(self): + # for index in reversed(range(self.nb.GetPageCount())): + # self.nb.DeletePage(index) + # self.nb.SendSizeEvent() + # gc.collect() + #def on_tab_change(self, event=None): + # page_to_select = event.GetSelection() + # wx.CallAfter(self.fix_focus, page_to_select) + # event.Skip(True) + #def fix_focus(self, page_to_select): + # page = self.nb.GetPage(page_to_select) + # page.SetFocus() + +#---------------------------------------------------------------------- +def MyExceptionHook(etype, value, trace): + """ + Handler for all unhandled exceptions. + :param `etype`: the exception type (`SyntaxError`, `ZeroDivisionError`, etc...); + :type `etype`: `Exception` + :param string `value`: the exception error message; + :param string `trace`: the traceback header, if any (otherwise, it prints the + standard Python header: ``Traceback (most recent call last)``. + """ + from wx._core import wxAssertionError + # Printing exception + traceback.print_exception(etype, value, trace) + if etype==wxAssertionError: + if wx.Platform == '__WXMAC__': + # We skip these exceptions on macos (likely bitmap size 0) + return + # Then showing to user the last error + frame = wx.GetApp().GetTopWindow() + tmp = traceback.format_exception(etype, value, trace) + if tmp[-1].find('Exception: Error:')==0: + Error(frame,tmp[-1][18:]) + elif tmp[-1].find('Exception: Warn:')==0: + Warn(frame,tmp[-1][17:]) + else: + exception = 'The following exception occured:\n\n'+ tmp[-1] + '\n'+tmp[-2].strip() + Error(frame,exception) + try: + frame.Thaw() # Make sure any freeze event is stopped + except: + pass + +# --------------------------------------------------------------------------------} +# --- Tests +# --------------------------------------------------------------------------------{ +def test(filenames=None): + if filenames is not None: + app = wx.App(False) + frame = MainFrame() + frame.load_files(filenames,fileformat=None) + return + +# --------------------------------------------------------------------------------} +# --- Wrapped WxApp +# --------------------------------------------------------------------------------{ +class MyWxApp(wx.App): + def __init__(self, redirect=False, filename=None): + try: + wx.App.__init__(self, redirect, filename) + except: + if wx.Platform == '__WXMAC__': + #msg = """This program needs access to the screen. + # Please run with 'pythonw', not 'python', and only when you are logged + # in on the main display of your Mac.""" + msg= """ +MacOS Error: + This program needs access to the screen. Please run with a + Framework build of python, and only when you are logged in + on the main display of your Mac. + +pyDatView help: + You see the error above because you are using a Mac and + the python executable you are using does not have access to + your screen. This is a Mac issue, not a pyDatView issue. + Instead of calling 'python pyDatView.py', you need to find + another python and do '/path/python pyDatView.py' + You can try './pythonmac pyDatView.py', a script provided + in this repository to detect the path (in some cases) + + You can find additional help in the file 'README.md'. 
+ + For quick reference, here are some typical cases: + - Your python was installed with 'brew', then likely use + /usr/lib/Cellar/python/XXXXX/Frameworks/python.framework/Versions/XXXX/bin/pythonXXX; + - Your python is an anaconda python, use something like:; + /anaconda3/bin/python.app (NOTE: the '.app'! + - You are using a python 2 version, you can use the system one: + /Library/Frameworks/Python.framework/Versions/XXX/bin/pythonXXX + /System/Library/Frameworks/Python.framework/Versions/XXX/bin/pythonXXX +""" + + elif wx.Platform == '__WXGTK__': + msg =""" +Error: + Unable to access the X Display, is $DISPLAY set properly? + +pyDatView help: + You are probably running this application on a server accessed via ssh. + Use `ssh -X` or `ssh -Y` to access the server. + Else, try setting up $DISPLAY before doing the ssh connection. +""" + else: + msg = 'Unable to create GUI' # TODO: more description is needed for wxMSW... + raise SystemExit(msg) + +# --------------------------------------------------------------------------------} +# --- Mains +# --------------------------------------------------------------------------------{ +def showApp(firstArg=None,dataframe=None,filenames=[]): + """ + The main function to start the data frame GUI. + """ + app = MyWxApp(False) + frame = MainFrame() + # Optional first argument + if firstArg is not None: + if isinstance(firstArg,list): + filenames=firstArg + elif isinstance(firstArg,str): + filenames=[firstArg] + elif isinstance(firstArg, pd.DataFrame): + dataframe=firstArg + # + if (dataframe is not None) and (len(dataframe)>0): + #import time + #tstart = time.time() + frame.load_df(dataframe) + #tend = time.time() + #print('PydatView time: ',tend-tstart) + elif len(filenames)>0: + frame.load_files(filenames, fileformats=None) + app.MainLoop() + +def cmdline(): + if len(sys.argv)>1: + pydatview(filename=sys.argv[1]) + else: + pydatview() diff --git a/tests/test_Tables.py b/tests/test_Tables.py index 29712d6..52fd3f0 100644 --- a/tests/test_Tables.py +++ b/tests/test_Tables.py @@ -1,46 +1,70 @@ -import unittest -import numpy as np -import pandas as pd -from pydatview.Tables import Table -import os - - - -class TestTable(unittest.TestCase): - - @classmethod - def setUpClass(cls): - d ={'ColA': np.linspace(0,1,100)+1,'ColB': np.random.normal(0,1,100)+0} - cls.df1 = pd.DataFrame(data=d) - d ={'ColA': np.linspace(0,1,100)+1,'ColB': np.random.normal(0,1,100)+0} - cls.df2 = pd.DataFrame(data=d) - - def test_table_name(self): - print(' ') - print(' ') - t1=Table(data=self.df1) - print(t1) - print(' ') - print(' ') - # Typically pyDatView adds tables like this: - # - # self.tabList.load_tables_from_files(filenames=filenames, fileformat=fileformat, bAdd=bAdd) - # - # if len(dfs)>0: - # tabs=[Table(df=dfs, name='default', filename=filename, fileformat=F.formatName())] - # else: - # for k in list(dfs.keys()): - # if len(dfs[k])>0: - # tabs.append(Table(df=dfs[k], name=k, filename=filename, fileformat=F.formatName())) - # OR - # if bAdd: - # self.tabList.append(Table(df=df, name=name)) - # else: - # self.tabList = TableList( [Table(df=df, name=name)] ) - # - # Tools add dfs like this to the GUI: - # self.tabList.from_dataframes(dataframes=dfs, names=names, bAdd=bAdd) - # - -if __name__ == '__main__': - unittest.main() +import unittest +import numpy as np +import pandas as pd +from pydatview.Tables import Table, TableList +import os + + + +class TestTable(unittest.TestCase): + + @classmethod + def setUpClass(cls): + d ={'ColA': np.linspace(0,1,100)+1,'ColB': 
np.random.normal(0,1,100)+0} + cls.df1 = pd.DataFrame(data=d) + d ={'ColA': np.linspace(0,1,100)+1,'ColB': np.random.normal(0,1,100)+0} + cls.df2 = pd.DataFrame(data=d) + + cls.scriptdir = os.path.dirname(__file__) + + def test_table_name(self): + t1=Table(data=self.df1) + self.assertEqual(t1.raw_name, 'default') + # Typically pyDatView adds tables like this: + # + # self.tabList.load_tables_from_files(filenames=filenames, fileformat=fileformat, bAdd=bAdd) + # + # if len(dfs)>0: + # tabs=[Table(df=dfs, name='default', filename=filename, fileformat=F.formatName())] + # else: + # for k in list(dfs.keys()): + # if len(dfs[k])>0: + # tabs.append(Table(df=dfs[k], name=k, filename=filename, fileformat=F.formatName())) + # OR + # if bAdd: + # self.tabList.append(Table(df=df, name=name)) + # else: + # self.tabList = TableList( [Table(df=df, name=name)] ) + # + # Tools add dfs like this to the GUI: + # self.tabList.from_dataframes(dataframes=dfs, names=names, bAdd=bAdd) + # + + + def test_load_files_misc_formats(self): + tablist = TableList() + files =[ + os.path.join(self.scriptdir,'../weio/weio/tests/example_files/CSVComma.csv'), + os.path.join(self.scriptdir,'../weio/weio/tests/example_files/HAWCStab2.pwr') + ] + # --- First read without fileformats + tablist.load_tables_from_files(filenames=files, fileformats=None, bAdd=False) + #print(tablist.fileformats) + + # --- Test iteration on tablist in passing.. + ffname1=[tab.fileformat.name for tab in tablist] + + # --- Then read with prescribed fileformats + fileformats1 = tablist.fileformats + tablist.load_tables_from_files(filenames=files, fileformats=fileformats1, bAdd=False) + ffname2 = [ff.name for ff in tablist.fileformats] + + self.assertEqual(ffname1, ffname2) + + + +if __name__ == '__main__': +# TestTable.setUpClass() +# tt= TestTable() +# tt.test_load_files_misc_formats() + unittest.main() From 254bdb821e55d1637980ebf2519196ef72388628 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Wed, 2 Feb 2022 01:07:16 -0700 Subject: [PATCH 20/36] Scaling units to SI or Wind Energy from the data menu (Closes #60), starting notion of plugins --- Makefile | 172 ++++++------ pydatview/Tables.py | 18 +- pydatview/common.py | 12 + pydatview/main.py | 9 + pydatview/plugins/__init__.py | 24 ++ pydatview/plugins/data_standardizeUnits.py | 107 ++++++++ pydatview/plugins/tests/__init__.py | 0 .../plugins/tests/test_standardizeUnits.py | 24 ++ tests/test_Tables.py | 13 + tests/test_common.py | 249 +++++++++--------- 10 files changed, 418 insertions(+), 210 deletions(-) create mode 100644 pydatview/plugins/__init__.py create mode 100644 pydatview/plugins/data_standardizeUnits.py create mode 100644 pydatview/plugins/tests/__init__.py create mode 100644 pydatview/plugins/tests/test_standardizeUnits.py diff --git a/Makefile b/Makefile index 491d533..b3893d6 100644 --- a/Makefile +++ b/Makefile @@ -1,85 +1,87 @@ -# --- Detecting OS -ifeq '$(findstring ;,$(PATH))' ';' - detected_OS := Windows -else - detected_OS := $(shell uname 2>/dev/null || echo Unknown) - detected_OS := $(patsubst CYGWIN%,Cygwin,$(detected_OS)) - detected_OS := $(patsubst MSYS%,MSYS,$(detected_OS)) - detected_OS := $(patsubst MINGW%,MSYS,$(detected_OS)) -endif - -testfile= weio/weio/tests/example_files/FASTIn_arf_coords.txt -all: -ifeq ($(detected_OS),Darwin) # Mac OS X - ./pythonmac pyDatView.py $(testfile) -else - python pyDatView.py $(testfile) -endif - - - - -deb: - python DEBUG.py - -install: - python setup.py install - -dep: - python -m pip install -r requirements.txt - -pull: - git 
pull --recurse-submodules -update:pull - - -help: - @echo "Available rules:" - @echo " all run the standalone program" - @echo " install install the python package in the system" - @echo " dep download the dependencies " - @echo " pull download the latest version " - @echo " test run the unit tests " - -test: -ifeq ($(detected_OS),Darwin) # Mac OS X - ./pythonmac -m unittest discover -v tests -else - python -m unittest discover -v tests -endif - -prof: - python -m cProfile -o tests/prof_all.prof tests/prof_all.py - python -m pyprof2calltree -i tests/prof_all.prof -o tests/callgrind.prof_all.prof - snakeviz tests/prof_all.prof - - -exe: - python -m nuitka --follow-imports --include-plugin-directory --include-plugin-files --show-progress --show-modules --output-dir=build-nuitka pyDatView.py - -exestd: - python -m nuitka --python-flag=no_site --assume-yes-for-downloads --standalone --follow-imports --include-plugin-directory --include-plugin-files --show-progress --show-modules --output-dir=build-nuitka-std pyDatView.py - -clean: - rm -rf __pycache__ - rm -rf *.egg-info - rm -rf *.spec - rm -rf build* - rm -rf dist - - -pyexe: - pyinstaller --onedir pyDatView.py - -version: -ifeq ($(OS),Windows_NT) - @echo "Doing nothing" -else - @sh _tools/setVersion.sh -endif - -installer: version - python -m nsist installer.cfg - - - +# --- Detecting OS +ifeq '$(findstring ;,$(PATH))' ';' + detected_OS := Windows +else + detected_OS := $(shell uname 2>/dev/null || echo Unknown) + detected_OS := $(patsubst CYGWIN%,Cygwin,$(detected_OS)) + detected_OS := $(patsubst MSYS%,MSYS,$(detected_OS)) + detected_OS := $(patsubst MINGW%,MSYS,$(detected_OS)) +endif + +testfile= weio/weio/tests/example_files/FASTIn_arf_coords.txt +all: +ifeq ($(detected_OS),Darwin) # Mac OS X + ./pythonmac pyDatView.py $(testfile) +else + python pyDatView.py $(testfile) +endif + + + + +deb: + python DEBUG.py + +install: + python setup.py install + +dep: + python -m pip install -r requirements.txt + +pull: + git pull --recurse-submodules +update:pull + + +help: + @echo "Available rules:" + @echo " all run the standalone program" + @echo " install install the python package in the system" + @echo " dep download the dependencies " + @echo " pull download the latest version " + @echo " test run the unit tests " + +test: +ifeq ($(detected_OS),Darwin) # Mac OS X + ./pythonmac -m unittest discover -v tests + ./pythonmac -m unittest discover -v pydatview/plugins/tests +else + python -m unittest discover -v tests + python -m unittest discover -v pydatview/plugins/tests +endif + +prof: + python -m cProfile -o tests/prof_all.prof tests/prof_all.py + python -m pyprof2calltree -i tests/prof_all.prof -o tests/callgrind.prof_all.prof + snakeviz tests/prof_all.prof + + +exe: + python -m nuitka --follow-imports --include-plugin-directory --include-plugin-files --show-progress --show-modules --output-dir=build-nuitka pyDatView.py + +exestd: + python -m nuitka --python-flag=no_site --assume-yes-for-downloads --standalone --follow-imports --include-plugin-directory --include-plugin-files --show-progress --show-modules --output-dir=build-nuitka-std pyDatView.py + +clean: + rm -rf __pycache__ + rm -rf *.egg-info + rm -rf *.spec + rm -rf build* + rm -rf dist + + +pyexe: + pyinstaller --onedir pyDatView.py + +version: +ifeq ($(OS),Windows_NT) + @echo "Doing nothing" +else + @sh _tools/setVersion.sh +endif + +installer: version + python -m nsist installer.cfg + + + diff --git a/pydatview/Tables.py b/pydatview/Tables.py index 16900ac..88fb00a 100644 --- 
a/pydatview/Tables.py +++ b/pydatview/Tables.py @@ -388,13 +388,14 @@ def __repr__(self): s+=' - filename : {}\n'.format(self.filename) s+=' - fileformat : {}\n'.format(self.fileformat) s+=' - fileformat_name : {}\n'.format(self.fileformat_name) + s+=' - columns : {}\n'.format(self.columns) s+=' - nCols x nRows: {}x{}\n'.format(self.nCols, self.nRows) return s def columnsFromDF(self,df): return [s.replace('_',' ') for s in df.columns.values.astype(str)] - + # --- Mask def clearMask(self): self.maskString='' self.mask=None @@ -428,6 +429,7 @@ def applyMaskString(self,maskString,bAdd=True): raise Exception('Error: The mask failed for table: '+self.name) return df_new, name_new + # --- Important manipulation def applyResampling(self,iCol,sampDict,bAdd=True): from pydatview.tools.signal import applySamplerDF if iCol==0: @@ -442,7 +444,6 @@ def applyResampling(self,iCol,sampDict,bAdd=True): self.data=df_new return df_new, name_new - def radialAvg(self,avgMethod, avgParam): import pydatview.fast.fastlib as fastlib import pydatview.fast.fastfarm as fastfarm @@ -484,6 +485,12 @@ def radialAvg(self,avgMethod, avgParam): names_new=[self.raw_name+'_AD', self.raw_name+'_ED', self.raw_name+'_BD'] return dfs_new, names_new + def changeUnits(self, flavor='WE'): + """ Change units of the table """ + # NOTE: moved to a plugin, but interface kept + from pydatview.plugins.data_standardizeUnits import changeUnits + changeUnits(self, flavor=flavor) + def convertTimeColumns(self): if len(self.data)>0: for i,c in enumerate(self.data.columns.values): @@ -519,6 +526,8 @@ def convertTimeColumns(self): print('>> Unknown type:',type(y.values[0])) #print(self.data.dtypes) + + # --- Column manipulations def renameColumn(self,iCol,newName): self.columns[iCol]=newName self.data.columns.values[iCol]=newName @@ -566,6 +575,8 @@ def setColumn(self,sNewName,NewCol,i,sFormula=''): def getColumn(self,i): """ Return column of data, where i=0 is the index column If a mask exist, the mask is applied + + TODO TODO TODO get rid of this! 
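+        Returns: x, isString, isDate, c (the same tuple as the return statement below)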
""" if i <= 0 : x = np.array(range(self.data.shape[0])) @@ -596,7 +607,6 @@ def getColumn(self,i): return x,isString,isDate,c - def evalFormula(self,sFormula): df = self.data Index = np.array(range(df.shape[0])) @@ -639,6 +649,7 @@ def export(self,path): + # --- Properties @property def basename(self): return os.path.splitext(os.path.basename(self.filename))[0] @@ -651,7 +662,6 @@ def shapestring(self): def shape(self): return (self.nRows, self.nCols) - @property def columns_clean(self): return [no_unit(s) for s in self.columns] diff --git a/pydatview/common.py b/pydatview/common.py index acda0c1..c389fcd 100644 --- a/pydatview/common.py +++ b/pydatview/common.py @@ -252,6 +252,9 @@ def getTabCommonColIndices(tabs): return IKeepPerTab, IMissPerTab, IDuplPerTab, nCols +# --------------------------------------------------------------------------------} +# --- Units +# --------------------------------------------------------------------------------{ def cleanCol(s): s=no_unit(s).strip() s=no_unit(s.replace('(',' [').replace(')',']')) @@ -273,6 +276,13 @@ def unit(s): else: return '' +def splitunit(s): + iu=s.rfind('[') + if iu>1: + return s[:iu], s[iu+1:].replace(']','') + else: + return s, '' + def inverse_unit(s): u=unit(s).strip() if u=='': @@ -288,6 +298,8 @@ def inverse_unit(s): else: return '1/('+u+')' + + def filter_list(L, string): """ simple (not regex or fuzzy) filtering of a list of strings Returns matched indices and strings diff --git a/pydatview/main.py b/pydatview/main.py index 34e8240..eb71e5b 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -35,6 +35,8 @@ # Helper from .common import * from .GUICommon import * +# Pluggins +from .plugins import dataPlugins @@ -142,6 +144,10 @@ def __init__(self, filename=None): self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'Resample') , dataMenu.Append(wx.ID_ANY, 'Resample')) self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'FASTRadialAverage'), dataMenu.Append(wx.ID_ANY, 'FAST - Radial average')) + # --- Data Plugins + for string, function in dataPlugins: + self.Bind(wx.EVT_MENU, lambda e, s_loc=string: function(self, e, s_loc), dataMenu.Append(wx.ID_ANY, string)) + toolMenu = wx.Menu() menuBar.Append(toolMenu, "&Tools") self.Bind(wx.EVT_MENU,lambda e: self.onShowTool(e, 'CurveFitting'), toolMenu.Append(wx.ID_ANY, 'Curve fitting')) @@ -221,6 +227,8 @@ def __init__(self, filename=None): ) self.SetAcceleratorTable(accel_tbl) + def printString(self, event=None, string=''): + print('>>> string',string) def onFilter(self,event): if hasattr(self,'selPanel'): @@ -417,6 +425,7 @@ def exportTab(self, iTab): def onShowTool(self, event=None, tool=''): """ + Show tool tool in 'Outlier', 'Filter', 'LogDec','FASTRadialAverage', 'Mask', 'CurveFitting' """ if not hasattr(self,'plotPanel'): diff --git a/pydatview/plugins/__init__.py b/pydatview/plugins/__init__.py new file mode 100644 index 0000000..0e5b4b1 --- /dev/null +++ b/pydatview/plugins/__init__.py @@ -0,0 +1,24 @@ +""" +Register your plugins in this file: + 1) add a function that calls your plugin. The signature needs to be: + def _function_name(mainframe, event=None, label='') + The corpus of this function should import your package + and call the main function of your package. Your are free + to use the signature your want for your package + + 2) add a tuple to the variable dataPlugins of the form + (string, _function_name) + + where string will be displayed under the data menu of pyDatView. + +See working examples in this file and this directory. 
+""" + +def _data_standardizeUnits(mainframe, event=None, label=''): + from .data_standardizeUnits import standardizeUnitsPlugin + standardizeUnitsPlugin(mainframe, event, label) + +dataPlugins=[ + ('Standardize Units (SI)', _data_standardizeUnits), + ('Standardize Units (WE)', _data_standardizeUnits), + ] diff --git a/pydatview/plugins/data_standardizeUnits.py b/pydatview/plugins/data_standardizeUnits.py new file mode 100644 index 0000000..b08fcbf --- /dev/null +++ b/pydatview/plugins/data_standardizeUnits.py @@ -0,0 +1,107 @@ +import unittest +import numpy as np +from pydatview.common import splitunit + +def standardizeUnitsPlugin(mainframe, event=None, label='Standardize Units (SI)'): + """ + Main entry point of the plugin + """ + flavor = label.split('(')[1][0:2] + + for t in mainframe.tabList: + changeUnits(t, flavor=flavor) + + if hasattr(mainframe,'selPanel'): + mainframe.selPanel.colPanel1.setColumns() + mainframe.selPanel.colPanel2.setColumns() + mainframe.selPanel.colPanel3.setColumns() + mainframe.onTabSelectionChange() # trigger replot + +def changeUnits(tab, flavor='SI'): + """ Change units of a table + NOTE: it relies on the Table class, which may change interface in the future.. + """ + if flavor=='WE': + for i, colname in enumerate(tab.columns): + colname, tab.data.iloc[:,i] = change_units_to_WE(colname, tab.data.iloc[:,i]) + tab.columns[i] = colname # TODO, use a dataframe everywhere.. + tab.data.columns = tab.columns + elif flavor=='SI': + for i, colname in enumerate(tab.columns): + colname, tab.data.iloc[:,i] = change_units_to_SI(colname, tab.data.iloc[:,i]) + tab.columns[i] = colname # TODO, use a dataframe everywhere.. + tab.data.columns = tab.columns + else: + raise NotImplementedError(flavor) + + +def change_units_to_WE(s, c): + """ + Change units to wind energy units + s: channel name (string) containing units, typically 'speed_[rad/s]' + c: channel (array) + """ + svar, u = splitunit(s) + u=u.lower() + scalings = {} + scalings['rad/s'] = (30/np.pi,'rpm') # TODO decide + scalings['rad' ] = (180/np.pi,'deg') + scalings['n'] = (1e-3, 'kN') + scalings['nm'] = (1e-3, 'kNm') + scalings['n-m'] = (1e-3, 'kNm') + scalings['n*m'] = (1e-3, 'kNm') + scalings['w'] = (1e-3, 'kW') + if u in scalings.keys(): + scale, new_unit = scalings[u] + s = svar+'['+new_unit+']' + c *= scale + return s, c + +def change_units_to_SI(s, c): + """ + Change units to SI units + TODO, a lot more units conversion needed...will add them as we go + s: channel name (string) containing units, typically 'speed_[rad/s]' + c: channel (array) + """ + svar, u = splitunit(s) + u=u.lower() + scalings = {} + scalings['rpm'] = (np.pi/30,'rad/s') + scalings['rad' ] = (180/np.pi,'deg') + scalings['kn'] = (1e3, 'N') + scalings['knm'] = (1e3, 'Nm') + scalings['kn-m'] = (1e3, 'Nm') + scalings['kn*m'] = (1e3, 'Nm') + scalings['kw'] = (1e3, 'W') + if u in scalings.keys(): + scale, new_unit = scalings[u] + s = svar+'['+new_unit+']' + c *= scale + return s, c + + + + + +class TestChangeUnits(unittest.TestCase): + + def test_change_units(self): + import pandas as pd + from pydatview.Tables import Table + data = np.ones((1,3)) + data[:,0] *= 2*np.pi/60 # rad/s + data[:,1] *= 2000 # N + data[:,2] *= 10*np.pi/180 # rad + df = pd.DataFrame(data=data, columns=['om [rad/s]','F [N]', 'angle_[rad]']) + tab=Table(data=df) + changeUnits(tab, flavor='WE') + np.testing.assert_almost_equal(tab.data.values[:,0],[1]) + np.testing.assert_almost_equal(tab.data.values[:,1],[2]) + np.testing.assert_almost_equal(tab.data.values[:,2],[10]) + 
self.assertEqual(tab.columns, ['om [rpm]', 'F [kN]', 'angle [deg]']) + raise Exception('>>>>>>>>>>>>') + + +if __name__ == '__main__': + unittest.main() diff --git a/pydatview/plugins/tests/__init__.py b/pydatview/plugins/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pydatview/plugins/tests/test_standardizeUnits.py b/pydatview/plugins/tests/test_standardizeUnits.py new file mode 100644 index 0000000..35843c3 --- /dev/null +++ b/pydatview/plugins/tests/test_standardizeUnits.py @@ -0,0 +1,24 @@ +import unittest +import numpy as np +import pandas as pd +from pydatview.plugins.data_standardizeUnits import changeUnits + +class TestChangeUnits(unittest.TestCase): + + def test_change_units(self): + from pydatview.Tables import Table + data = np.ones((1,3)) + data[:,0] *= 2*np.pi/60 # rad/s + data[:,1] *= 2000 # N + data[:,2] *= 10*np.pi/180 # rad + df = pd.DataFrame(data=data, columns=['om [rad/s]','F [N]', 'angle_[rad]']) + tab=Table(data=df) + changeUnits(tab, flavor='WE') + np.testing.assert_almost_equal(tab.data.values[:,0],[1]) + np.testing.assert_almost_equal(tab.data.values[:,1],[2]) + np.testing.assert_almost_equal(tab.data.values[:,2],[10]) + self.assertEqual(tab.columns, ['om [rpm]', 'F [kN]', 'angle [deg]']) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_Tables.py b/tests/test_Tables.py index 52fd3f0..0b11459 100644 --- a/tests/test_Tables.py +++ b/tests/test_Tables.py @@ -62,6 +62,19 @@ def test_load_files_misc_formats(self): self.assertEqual(ffname1, ffname2) + def test_change_units(self): + data = np.ones((1,3)) + data[:,0] *= 2*np.pi/60 # rad/s + data[:,1] *= 2000 # N + data[:,2] *= 10*np.pi/180 # rad + df = pd.DataFrame(data=data, columns=['om [rad/s]','F [N]', 'angle_[rad]']) + tab=Table(data=df) + tab.changeUnits() + np.testing.assert_almost_equal(tab.data.values[:,0],[1]) + np.testing.assert_almost_equal(tab.data.values[:,1],[2]) + np.testing.assert_almost_equal(tab.data.values[:,2],[10]) + self.assertEqual(tab.columns, ['om [rpm]', 'F [kN]', 'angle [deg]']) + if __name__ == '__main__': # TestTable.setUpClass() diff --git a/tests/test_common.py b/tests/test_common.py index 3f6421c..1e5ab9b 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,121 +1,128 @@ -# - *- coding: utf- 8 - *- -from __future__ import unicode_literals,print_function -import unittest -import numpy as np -import pandas as pd -from pydatview.common import unit,no_unit,ellude_common,getDt, find_leftstop -from pydatview.common import has_chinese_char -from pydatview.common import filter_list -from pydatview.common import rectangleOverlap -import datetime - -class TestCommon(unittest.TestCase): - def assertEqual(self, first, second, msg=None): - #print('>',first,'<',' >',second,'<') - super(TestCommon, self).assertEqual(first, second, msg) - - def test_unit(self): - self.assertEqual(unit ('speed [m/s]'),'m/s' ) - self.assertEqual(unit ('speed [m/s' ),'m/s' ) # ... 
- self.assertEqual(no_unit('speed [m/s]'),'speed') - - def test_date(self): - def test_dt(datestr,dt_ref): - def myassert(x): - if np.isnan(dt_ref): - self.assertTrue(np.isnan(getDt(x))) - else: - self.assertEqual(getDt(x),dt_ref) - # Type: Numpy array - Elements: datetime64 - if isinstance(datestr[0],int): - x=np.array(datestr, dtype='datetime64[s]') - myassert(x) - - x=np.array(datestr) - myassert(x) - elif isinstance(datestr[0],float): - x=np.array(datestr) - myassert(x) - else: - x=np.array(datestr, dtype='datetime64') - myassert(x) - # Type: Pandas DatetimeIndex - Elements: TimeSamp - df = pd.DataFrame(data=datestr) - x = pd.to_datetime(df.iloc[:,0].values) - myassert(x) - # Type: Numpy array - Elements: datetime.datetime - df = pd.DataFrame(data=datestr) - x = pd.to_datetime(df.iloc[:,0].values).to_pydatetime() - myassert(x) - - test_dt(['2008-01-01','2009-01-01'],24*366*3600); # year - test_dt(['2008-01-01','2008-02-01'],24*3600*31); #month - test_dt(['2000-10-15 01:00:00', '2000-10-15 02:00:00'],3600); # hour - test_dt(['2000-10-15 00:00:05.000001', '2000-10-15 00:00:05.000002'],0.000001);#mu s - test_dt([np.datetime64('NaT'),'2000-10-15 00:00:05.000001'],np.nan); - test_dt([np.datetime64('NaT'),'2000-10-15 00:00:05.000001', '2000-10-15 00:00:05.000002'],0.000001) - test_dt([0],np.nan) - test_dt([0.0],np.nan) -# test_dt([0,1],1) # TODO -# test_dt([0.0,1.0],1.0) # TODO - self.assertEqual(getDt([0.0,0.1]),0.1) - self.assertEqual(getDt(np.array([0.0,0.1])),0.1) - self.assertEqual(getDt([0,1]),1) - self.assertEqual(getDt(np.array([0,1])),1) - - - def test_leftstop(self): - self.assertEqual(find_leftstop('A' ),'A' ) - self.assertEqual(find_leftstop('_' ),'' ) - self.assertEqual(find_leftstop('A_' ),'A' ) - self.assertEqual(find_leftstop('_B' ),'' ) - self.assertEqual(find_leftstop('ABC' ),'ABC') - self.assertEqual(find_leftstop('AB_D'),'AB' ) - self.assertEqual(find_leftstop('AB.D'),'AB' ) - - - def test_ellude(self): - print('') - print('') - self.assertListEqual(ellude_common(['>AA' ,'>AB'] ),['AA' ,'AB'] ) - self.assertListEqual(ellude_common(['AAA' ,'AAA_raw']),['AAA' ,'AAA_raw']) - self.assertListEqual(ellude_common(['A_.txt','A.txt'] ),['A_' ,'A'] ) - self.assertListEqual(ellude_common(['A_' ,'A'] ),['A_' ,'A'] ) - self.assertListEqual(ellude_common(['ABCDA_','ABCDAA'] ),['ABCDA_','ABCDAA'] ) - S=['C:|A_BD', 'C:|A_BD_bld|DC', 'C:|A_BD_bld|BP'] - self.assertListEqual(ellude_common(S),['BD','BD_bld|DC','BD_bld|BP'] ) - self.assertListEqual(ellude_common(['C|FO' , 'C|FO_HD']) , ['FO' , 'FO_HD'] ) - self.assertListEqual(ellude_common(['CT_0.11' , 'CT_0.22']) , ['11' , '22'] ) # Unfortunate - self.assertListEqual(ellude_common(['CT_0.1' , 'CT_0.9']) , ['0.1' , '0.9'] ) - self.assertListEqual(ellude_common(['CT=0.1' , 'CT=0.9']) , ['CT=0.1' , 'CT=0.9'] ) - self.assertListEqual(ellude_common(['AAA' , 'ABA'] , minLength=-1) , ['A' , 'B'] ) - #print(ellude_common(['Farm.ifw.T1','Farm.ifw.T2'],minLength=2)) - #print('') - #print('') - - def test_chinese_char(self): - self.assertEqual(has_chinese_char('') ,False) - self.assertEqual(has_chinese_char('aaaa'),False) - self.assertEqual(has_chinese_char('aaæ—¶'),True ) - self.assertEqual(has_chinese_char('aæ—¶a'),True ) - - def test_filter(self): - L=['RotTrq_[kNm]','B1RootMy_[kNm]','B2RootMy_[kNm]','Power_[kW]'] - Lf, If = filter_list(L,'Root') - self.assertEqual(If,[1,2]) - Lf, If = filter_list(L,'ro') - self.assertEqual(If,[0,1,2]) - self.assertEqual(Lf[0],'RotTrq_[kNm]') - Lf, If = filter_list(L,'Kro') - self.assertEqual(len(If),0) - 
self.assertEqual(len(Lf),0) - - def test_rectangleOverlap(self): - self.assertEqual(rectangleOverlap(0,0,1,1,0,0,2,2) ,True) # rect1 contained - self.assertEqual(rectangleOverlap(-2,-2,1,1,0,0,1,1) ,True) # rect2 contained - self.assertEqual(rectangleOverlap(-2,-2, 1, 1,0,0,2,2),True) # overlap corner2 in - self.assertEqual(rectangleOverlap(-2,-2, 1, 1,-3,0,2,2),True) # overlap - self.assertEqual(rectangleOverlap(-2,-2,-1,-1,0,0,1,1),False) - -if __name__ == '__main__': - unittest.main() +# - *- coding: utf- 8 - *- +from __future__ import unicode_literals,print_function +import unittest +import numpy as np +import pandas as pd +from pydatview.common import unit, no_unit, splitunit +from pydatview.common import ellude_common, getDt, find_leftstop +from pydatview.common import has_chinese_char +from pydatview.common import filter_list +from pydatview.common import rectangleOverlap +import datetime + +class TestCommon(unittest.TestCase): + def assertEqual(self, first, second, msg=None): + #print('>',first,'<',' >',second,'<') + super(TestCommon, self).assertEqual(first, second, msg) + + def test_unit(self): + self.assertEqual(unit ('speed [m/s]'),'m/s' ) + self.assertEqual(unit ('speed [m/s' ),'m/s' ) # ... + self.assertEqual(no_unit('speed [m/s]'),'speed') + + def test_splitunit(self): + self.assertEqual(splitunit ('speed [m/s]'),('speed ','m/s' )) + self.assertEqual(splitunit ('speed [m/s' ),('speed ','m/s' )) + self.assertEqual(splitunit ('speed_[m/s]'),('speed_','m/s' )) + self.assertEqual(splitunit ('speed'),('speed','' )) + + def test_date(self): + def test_dt(datestr,dt_ref): + def myassert(x): + if np.isnan(dt_ref): + self.assertTrue(np.isnan(getDt(x))) + else: + self.assertEqual(getDt(x),dt_ref) + # Type: Numpy array - Elements: datetime64 + if isinstance(datestr[0],int): + x=np.array(datestr, dtype='datetime64[s]') + myassert(x) + + x=np.array(datestr) + myassert(x) + elif isinstance(datestr[0],float): + x=np.array(datestr) + myassert(x) + else: + x=np.array(datestr, dtype='datetime64') + myassert(x) + # Type: Pandas DatetimeIndex - Elements: TimeSamp + df = pd.DataFrame(data=datestr) + x = pd.to_datetime(df.iloc[:,0].values) + myassert(x) + # Type: Numpy array - Elements: datetime.datetime + df = pd.DataFrame(data=datestr) + x = pd.to_datetime(df.iloc[:,0].values).to_pydatetime() + myassert(x) + + test_dt(['2008-01-01','2009-01-01'],24*366*3600); # year + test_dt(['2008-01-01','2008-02-01'],24*3600*31); #month + test_dt(['2000-10-15 01:00:00', '2000-10-15 02:00:00'],3600); # hour + test_dt(['2000-10-15 00:00:05.000001', '2000-10-15 00:00:05.000002'],0.000001);#mu s + test_dt([np.datetime64('NaT'),'2000-10-15 00:00:05.000001'],np.nan); + test_dt([np.datetime64('NaT'),'2000-10-15 00:00:05.000001', '2000-10-15 00:00:05.000002'],0.000001) + test_dt([0],np.nan) + test_dt([0.0],np.nan) +# test_dt([0,1],1) # TODO +# test_dt([0.0,1.0],1.0) # TODO + self.assertEqual(getDt([0.0,0.1]),0.1) + self.assertEqual(getDt(np.array([0.0,0.1])),0.1) + self.assertEqual(getDt([0,1]),1) + self.assertEqual(getDt(np.array([0,1])),1) + + + def test_leftstop(self): + self.assertEqual(find_leftstop('A' ),'A' ) + self.assertEqual(find_leftstop('_' ),'' ) + self.assertEqual(find_leftstop('A_' ),'A' ) + self.assertEqual(find_leftstop('_B' ),'' ) + self.assertEqual(find_leftstop('ABC' ),'ABC') + self.assertEqual(find_leftstop('AB_D'),'AB' ) + self.assertEqual(find_leftstop('AB.D'),'AB' ) + + + def test_ellude(self): + print('') + print('') + self.assertListEqual(ellude_common(['>AA' ,'>AB'] ),['AA' ,'AB'] ) + 
self.assertListEqual(ellude_common(['AAA' ,'AAA_raw']),['AAA' ,'AAA_raw']) + self.assertListEqual(ellude_common(['A_.txt','A.txt'] ),['A_' ,'A'] ) + self.assertListEqual(ellude_common(['A_' ,'A'] ),['A_' ,'A'] ) + self.assertListEqual(ellude_common(['ABCDA_','ABCDAA'] ),['ABCDA_','ABCDAA'] ) + S=['C:|A_BD', 'C:|A_BD_bld|DC', 'C:|A_BD_bld|BP'] + self.assertListEqual(ellude_common(S),['BD','BD_bld|DC','BD_bld|BP'] ) + self.assertListEqual(ellude_common(['C|FO' , 'C|FO_HD']) , ['FO' , 'FO_HD'] ) + self.assertListEqual(ellude_common(['CT_0.11' , 'CT_0.22']) , ['11' , '22'] ) # Unfortunate + self.assertListEqual(ellude_common(['CT_0.1' , 'CT_0.9']) , ['0.1' , '0.9'] ) + self.assertListEqual(ellude_common(['CT=0.1' , 'CT=0.9']) , ['CT=0.1' , 'CT=0.9'] ) + self.assertListEqual(ellude_common(['AAA' , 'ABA'] , minLength=-1) , ['A' , 'B'] ) + #print(ellude_common(['Farm.ifw.T1','Farm.ifw.T2'],minLength=2)) + #print('') + #print('') + + def test_chinese_char(self): + self.assertEqual(has_chinese_char('') ,False) + self.assertEqual(has_chinese_char('aaaa'),False) + self.assertEqual(has_chinese_char('aaæ—¶'),True ) + self.assertEqual(has_chinese_char('aæ—¶a'),True ) + + def test_filter(self): + L=['RotTrq_[kNm]','B1RootMy_[kNm]','B2RootMy_[kNm]','Power_[kW]'] + Lf, If = filter_list(L,'Root') + self.assertEqual(If,[1,2]) + Lf, If = filter_list(L,'ro') + self.assertEqual(If,[0,1,2]) + self.assertEqual(Lf[0],'RotTrq_[kNm]') + Lf, If = filter_list(L,'Kro') + self.assertEqual(len(If),0) + self.assertEqual(len(Lf),0) + + def test_rectangleOverlap(self): + self.assertEqual(rectangleOverlap(0,0,1,1,0,0,2,2) ,True) # rect1 contained + self.assertEqual(rectangleOverlap(-2,-2,1,1,0,0,1,1) ,True) # rect2 contained + self.assertEqual(rectangleOverlap(-2,-2, 1, 1,0,0,2,2),True) # overlap corner2 in + self.assertEqual(rectangleOverlap(-2,-2, 1, 1,-3,0,2,2),True) # overlap + self.assertEqual(rectangleOverlap(-2,-2,-1,-1,0,0,1,1),False) + +if __name__ == '__main__': + unittest.main() From 62a1a1af23b112fb87f8bfe37fc9bc1c5ab5ac59 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Wed, 2 Feb 2022 17:21:05 -0700 Subject: [PATCH 21/36] Binning implemented as a Data tool. Also add a plottype in the future. (Closes #14) --- pydatview/GUIPlotPanel.py | 2800 +++++++++++++++-------------- pydatview/main.py | 31 +- pydatview/plotdata.py | 1609 ++++++++--------- pydatview/plugins/__init__.py | 10 +- pydatview/plugins/data_binning.py | 307 ++++ pydatview/tools/stats.py | 21 + 6 files changed, 2573 insertions(+), 2205 deletions(-) create mode 100644 pydatview/plugins/data_binning.py diff --git a/pydatview/GUIPlotPanel.py b/pydatview/GUIPlotPanel.py index 885cba4..75ae52d 100644 --- a/pydatview/GUIPlotPanel.py +++ b/pydatview/GUIPlotPanel.py @@ -1,1397 +1,1403 @@ -import os -import numpy as np -import wx -import wx.lib.buttons as buttons -import dateutil # required by matplotlib -#from matplotlib import pyplot as plt -import matplotlib -matplotlib.use('wxAgg') # Important for Windows version of installer. 
NOTE: changed from Agg to wxAgg -from matplotlib import rc as matplotlib_rc -try: - from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas -except Exception as e: - print('') - print('Error: problem importing `matplotlib.backends.backend_wx`.') - import platform - if platform.system()=='Darwin': - print('') - print('pyDatView help:') - print(' This is a typical issue on MacOS, most likely you are') - print(' using the native MacOS python with the native matplolib') - print(' library, which is incompatible with `wxPython`.') - print('') - print(' You can solve this by either:') - print(' - using python3, and pip3 e.g. installing it with brew') - print(' - using a virtual environment with python 2 or 3') - print(' - using anaconda with python 2 or 3'); - print('') - import sys - sys.exit(1) - else: - raise e -from matplotlib.figure import Figure -from matplotlib.pyplot import rcParams as pyplot_rc -from matplotlib import font_manager -from pandas.plotting import register_matplotlib_converters - -import gc - -from .common import * # unique, CHAR -from .plotdata import PlotData, compareMultiplePD -from .GUICommon import * -from .GUIToolBox import MyMultiCursor, MyNavigationToolbar2Wx, TBAddTool, TBAddCheckTool -from .GUIMeasure import GUIMeasure -from . import icons - -font = {'size' : 8} -matplotlib_rc('font', **font) -pyplot_rc['agg.path.chunksize'] = 20000 - - -class PDFCtrlPanel(wx.Panel): - def __init__(self, parent): - super(PDFCtrlPanel,self).__init__(parent) - self.parent = parent - lb = wx.StaticText( self, -1, 'Number of bins:') - self.scBins = wx.SpinCtrl(self, value='50',size=wx.Size(70,-1)) - self.scBins.SetRange(3, 10000) - self.cbSmooth = wx.CheckBox(self, -1, 'Smooth',(10,10)) - self.cbSmooth.SetValue(False) - dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) - dummy_sizer.Add(lb ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(self.scBins ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(self.cbSmooth ,0, flag = wx.CENTER|wx.LEFT,border = 6) - self.SetSizer(dummy_sizer) - self.Bind(wx.EVT_TEXT , self.onPDFOptionChange, self.scBins) - self.Bind(wx.EVT_CHECKBOX, self.onPDFOptionChange) - self.Hide() - - def onPDFOptionChange(self,event=None): - self.parent.load_and_draw(); # DATA HAS CHANGED - -class MinMaxPanel(wx.Panel): - def __init__(self, parent): - super(MinMaxPanel,self).__init__(parent) - self.parent = parent - self.cbxMinMax = wx.CheckBox(self, -1, 'xMinMax',(10,10)) - self.cbyMinMax = wx.CheckBox(self, -1, 'yMinMax',(10,10)) - self.cbxMinMax.SetValue(False) - self.cbyMinMax.SetValue(True) - dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) - dummy_sizer.Add(self.cbxMinMax ,0, flag=wx.CENTER|wx.LEFT, border = 1) - dummy_sizer.Add(self.cbyMinMax ,0, flag=wx.CENTER|wx.LEFT, border = 1) - self.SetSizer(dummy_sizer) - self.Bind(wx.EVT_CHECKBOX, self.onMinMaxChange) - self.Hide() - - def onMinMaxChange(self,event=None): - self.parent.load_and_draw(); # DATA HAS CHANGED - -class CompCtrlPanel(wx.Panel): - def __init__(self, parent): - super(CompCtrlPanel,self).__init__(parent) - self.parent = parent - lblList = ['Relative', '|Relative|','Ratio','Absolute','Y-Y'] - self.rbType = wx.RadioBox(self, label = 'Type', choices = lblList, - majorDimension = 1, style = wx.RA_SPECIFY_ROWS) - dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) - dummy_sizer.Add(self.rbType ,0, flag = wx.CENTER|wx.LEFT,border = 1) - self.SetSizer(dummy_sizer) - self.rbType.Bind(wx.EVT_RADIOBOX,self.onTypeChange) - self.Hide() - - def onTypeChange(self,e): - self.parent.load_and_draw(); # 
DATA HAS CHANGED - - -class SpectralCtrlPanel(wx.Panel): - def __init__(self, parent): - super(SpectralCtrlPanel,self).__init__(parent) - self.parent = parent - # --- GUI widgets - lb = wx.StaticText( self, -1, 'Type:') - self.cbType = wx.ComboBox(self, choices=['PSD','f x PSD','Amplitude'] , style=wx.CB_READONLY) - self.cbType.SetSelection(0) - lbAveraging = wx.StaticText( self, -1, 'Avg.:') - self.cbAveraging = wx.ComboBox(self, choices=['None','Welch'] , style=wx.CB_READONLY) - self.cbAveraging.SetSelection(1) - self.lbAveragingMethod = wx.StaticText( self, -1, 'Window:') - self.cbAveragingMethod = wx.ComboBox(self, choices=['Hamming','Hann','Rectangular'] , style=wx.CB_READONLY) - self.cbAveragingMethod.SetSelection(0) - self.lbP2 = wx.StaticText( self, -1, '2^n:') - self.scP2 = wx.SpinCtrl(self, value='11',size=wx.Size(40,-1)) - self.lbWinLength = wx.StaticText( self, -1, '(2048) ') - self.scP2.SetRange(3, 19) - lbMaxFreq = wx.StaticText( self, -1, 'Xlim:') - self.tMaxFreq = wx.TextCtrl(self,size = (30,-1),style=wx.TE_PROCESS_ENTER) - self.tMaxFreq.SetValue("-1") - self.cbDetrend = wx.CheckBox(self, -1, 'Detrend',(10,10)) - lbX = wx.StaticText( self, -1, 'x:') - self.cbTypeX = wx.ComboBox(self, choices=['1/x','2pi/x','x'] , style=wx.CB_READONLY) - self.cbTypeX.SetSelection(0) - # Layout - dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) - dummy_sizer.Add(lb ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(self.cbType ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(lbAveraging ,0, flag = wx.CENTER|wx.LEFT,border = 6) - dummy_sizer.Add(self.cbAveraging ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(self.lbAveragingMethod,0, flag = wx.CENTER|wx.LEFT,border = 6) - dummy_sizer.Add(self.cbAveragingMethod,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(self.lbP2 ,0, flag = wx.CENTER|wx.LEFT,border = 6) - dummy_sizer.Add(self.scP2 ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(self.lbWinLength ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(lbMaxFreq ,0, flag = wx.CENTER|wx.LEFT,border = 6) - dummy_sizer.Add(self.tMaxFreq ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(lbX ,0, flag = wx.CENTER|wx.LEFT,border = 6) - dummy_sizer.Add(self.cbTypeX ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(self.cbDetrend ,0, flag = wx.CENTER|wx.LEFT,border = 7) - self.SetSizer(dummy_sizer) - self.Bind(wx.EVT_COMBOBOX ,self.onSpecCtrlChange) - self.Bind(wx.EVT_TEXT ,self.onP2ChangeText ,self.scP2 ) - self.Bind(wx.EVT_TEXT_ENTER,self.onXlimChange ,self.tMaxFreq ) - self.Bind(wx.EVT_CHECKBOX ,self.onDetrendChange ,self.cbDetrend) - self.Hide() - - def onXlimChange(self,event=None): - self.parent.redraw_same_data(); - def onSpecCtrlChange(self,event=None): - self.parent.load_and_draw() # Data changes - def onDetrendChange(self,event=None): - self.parent.load_and_draw() # Data changes - - def onP2ChangeText(self,event=None): - nExp=self.scP2.GetValue() - self.updateP2(nExp) - self.parent.load_and_draw() # Data changes - - def updateP2(self,P2): - self.lbWinLength.SetLabel("({})".format(2**P2)) - - - - -class PlotTypePanel(wx.Panel): - def __init__(self, parent): - # Superclass constructor - super(PlotTypePanel,self).__init__(parent) - #self.SetBackgroundColour('yellow') - # data - self.parent = parent - # --- Ctrl Panel - self.cbRegular = wx.RadioButton(self, -1, 'Regular',style=wx.RB_GROUP) - self.cbPDF = wx.RadioButton(self, -1, 'PDF' , ) - self.cbFFT = wx.RadioButton(self, -1, 'FFT' , ) - self.cbMinMax = wx.RadioButton(self, -1, 
'MinMax' , ) - self.cbCompare = wx.RadioButton(self, -1, 'Compare', ) - self.cbRegular.SetValue(True) - self.Bind(wx.EVT_RADIOBUTTON, self.pdf_select , self.cbPDF ) - self.Bind(wx.EVT_RADIOBUTTON, self.fft_select , self.cbFFT ) - self.Bind(wx.EVT_RADIOBUTTON, self.minmax_select , self.cbMinMax ) - self.Bind(wx.EVT_RADIOBUTTON, self.compare_select, self.cbCompare) - self.Bind(wx.EVT_RADIOBUTTON, self.regular_select, self.cbRegular) - # LAYOUT - cb_sizer = wx.FlexGridSizer(rows=5, cols=1, hgap=0, vgap=0) - cb_sizer.Add(self.cbRegular , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbPDF , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbFFT , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbMinMax , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbCompare , 0, flag=wx.ALL, border=1) - self.SetSizer(cb_sizer) - - def plotType(self): - plotType='Regular' - if self.cbMinMax.GetValue(): - plotType='MinMax' - elif self.cbPDF.GetValue(): - plotType='PDF' - elif self.cbFFT.GetValue(): - plotType='FFT' - elif self.cbCompare.GetValue(): - plotType='Compare' - return plotType - - def regular_select(self, event=None): - self.clear_measures() - self.parent.cbLogY.SetValue(False) - # - self.parent.spcPanel.Hide(); - self.parent.pdfPanel.Hide(); - self.parent.cmpPanel.Hide(); - self.parent.mmxPanel.Hide(); - self.parent.slEsth.Hide(); - self.parent.plotsizer.Layout() - # - self.parent.load_and_draw() # Data changes - - def compare_select(self, event=None): - self.clear_measures() - self.parent.cbLogY.SetValue(False) - self.parent.show_hide(self.parent.cmpPanel, self.cbCompare.GetValue()) - self.parent.spcPanel.Hide(); - self.parent.pdfPanel.Hide(); - self.parent.mmxPanel.Hide(); - self.parent.plotsizer.Layout() - self.parent.load_and_draw() # Data changes - - def fft_select(self, event=None): - self.clear_measures() - self.parent.show_hide(self.parent.spcPanel, self.cbFFT.GetValue()) - self.parent.cbLogY.SetValue(self.cbFFT.GetValue()) - self.parent.pdfPanel.Hide(); - self.parent.mmxPanel.Hide(); - self.parent.plotsizer.Layout() - self.parent.load_and_draw() # Data changes - - def pdf_select(self, event=None): - self.clear_measures() - self.parent.cbLogX.SetValue(False) - self.parent.cbLogY.SetValue(False) - self.parent.show_hide(self.parent.pdfPanel, self.cbPDF.GetValue()) - self.parent.spcPanel.Hide(); - self.parent.cmpPanel.Hide(); - self.parent.mmxPanel.Hide(); - self.parent.plotsizer.Layout() - self.parent.load_and_draw() # Data changes - - def minmax_select(self, event): - self.clear_measures() - self.parent.cbLogY.SetValue(False) - self.parent.show_hide(self.parent.mmxPanel, self.cbMinMax.GetValue()) - self.parent.spcPanel.Hide(); - self.parent.pdfPanel.Hide(); - self.parent.cmpPanel.Hide(); - self.parent.plotsizer.Layout() - self.parent.load_and_draw() # Data changes - - def clear_measures(self): - self.parent.rightMeasure.clear() - self.parent.leftMeasure.clear() - self.parent.lbDeltaX.SetLabel('') - self.parent.lbDeltaY.SetLabel('') - -class EstheticsPanel(wx.Panel): - def __init__(self, parent): - wx.Panel.__init__(self, parent) - self.parent=parent - #self.SetBackgroundColour('red') - - lbFont = wx.StaticText( self, -1, 'Font:') - self.cbFont = wx.ComboBox(self, choices=['6','7','8','9','10','11','12','13','14','15','16','17','18'] , style=wx.CB_READONLY) - self.cbFont.SetSelection(2) - # NOTE: we don't offer "best" since best is slow - lbLegend = wx.StaticText( self, -1, 'Legend:') - self.cbLegend = wx.ComboBox(self, choices=['None','Upper right','Upper left','Lower left','Lower 
right','Right','Center left','Center right','Lower center','Upper center','Center'] , style=wx.CB_READONLY) - self.cbLegend.SetSelection(1) - lbLgdFont = wx.StaticText( self, -1, 'Legend font:') - self.cbLgdFont = wx.ComboBox(self, choices=['6','7','8','9','10','11','12','13','14','15','16','17','18'] , style=wx.CB_READONLY) - self.cbLgdFont.SetSelection(2) - lbLW = wx.StaticText( self, -1, 'Line width:') - self.cbLW = wx.ComboBox(self, choices=['0.5','1.0','1.5','2.0','2.5','3.0'] , style=wx.CB_READONLY) - self.cbLW.SetSelection(2) - lbMS = wx.StaticText( self, -1, 'Marker size:') - self.cbMS= wx.ComboBox(self, choices=['0.5','1','2','3','4','5','6','7','8'] , style=wx.CB_READONLY) - self.cbMS.SetSelection(2) - - # Layout - #dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) - dummy_sizer = wx.WrapSizer(orient=wx.HORIZONTAL) - dummy_sizer.Add(lbFont ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(self.cbFont ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(lbLW ,0, flag = wx.CENTER|wx.LEFT,border = 5) - dummy_sizer.Add(self.cbLW ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(lbMS ,0, flag = wx.CENTER|wx.LEFT,border = 5) - dummy_sizer.Add(self.cbMS ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(lbLegend ,0, flag = wx.CENTER|wx.LEFT,border = 5) - dummy_sizer.Add(self.cbLegend ,0, flag = wx.CENTER|wx.LEFT,border = 1) - dummy_sizer.Add(lbLgdFont ,0, flag = wx.CENTER|wx.LEFT,border = 5) - dummy_sizer.Add(self.cbLgdFont ,0, flag = wx.CENTER|wx.LEFT,border = 1) - self.SetSizer(dummy_sizer) - self.Hide() - # Callbacks - self.Bind(wx.EVT_COMBOBOX ,self.onAnyEsthOptionChange) - self.cbFont.Bind(wx.EVT_COMBOBOX ,self.onFontOptionChange) - - def onAnyEsthOptionChange(self,event=None): - self.parent.redraw_same_data() - - def onFontOptionChange(self,event=None): - matplotlib_rc('font', **{'size':int(self.cbFont.Value) }) # affect all (including ticks) - self.onAnyEsthOptionChange() - - -class PlotPanel(wx.Panel): - def __init__(self, parent, selPanel,infoPanel=None, mainframe=None): - - # Superclass constructor - super(PlotPanel,self).__init__(parent) - - # Font handling - font = parent.GetFont() - font.SetPointSize(font.GetPointSize()-1) - self.SetFont(font) - # Preparing a special font manager for chinese characters - self.specialFont=None - try: - pyplot_path = matplotlib.get_data_path() - except: - pyplot_path = pyplot_rc['datapath'] - CH_F_PATHS = [ - os.path.join(pyplot_path, 'fonts/ttf/SimHei.ttf'), - os.path.join(os.path.dirname(__file__),'../SimHei.ttf')] - for fpath in CH_F_PATHS: - if os.path.exists(fpath): - fontP = font_manager.FontProperties(fname=fpath) - fontP.set_size(font.GetPointSize()) - self.specialFont=fontP - break - # data - self.selPanel = selPanel # <<< dependency with selPanel should be minimum - self.selMode = '' - self.infoPanel=infoPanel - self.infoPanel.setPlotMatrixCallbacks(self._onPlotMatrixLeftClick, self._onPlotMatrixRightClick) - self.parent = parent - self.mainframe= mainframe - self.plotData = [] - self.plotDataOptions=dict() - if self.selPanel is not None: - bg=self.selPanel.BackgroundColour - self.SetBackgroundColour(bg) # sowhow, our parent has a wrong color - #self.SetBackgroundColour('red') - self.leftMeasure = GUIMeasure(1, 'firebrick') - self.rightMeasure = GUIMeasure(2, 'darkgreen') - self.xlim_prev = [[0, 1]] - self.ylim_prev = [[0, 1]] - # GUI - self.fig = Figure(facecolor="white", figsize=(1, 1)) - register_matplotlib_converters() - self.canvas = FigureCanvas(self, -1, self.fig) - 
self.canvas.mpl_connect('motion_notify_event', self.onMouseMove) - self.canvas.mpl_connect('button_press_event', self.onMouseClick) - self.canvas.mpl_connect('button_release_event', self.onMouseRelease) - self.canvas.mpl_connect('draw_event', self.onDraw) - self.clickLocation = (None, 0, 0) - - self.navTBTop = MyNavigationToolbar2Wx(self.canvas, ['Home', 'Pan']) - self.navTBBottom = MyNavigationToolbar2Wx(self.canvas, ['Subplots', 'Save']) - TBAddCheckTool(self.navTBBottom,'', icons.chart.GetBitmap(), self.onEsthToggle) - self.esthToggle=False - - self.navTBBottom.Realize() - - #self.navTB = wx.ToolBar(self, style=wx.TB_HORIZONTAL|wx.TB_HORZ_LAYOUT|wx.TB_NODIVIDER|wx.TB_FLAT) - #self.navTB.SetMargins(0,0) - #self.navTB.SetToolPacking(0) - #self.navTB.AddCheckTool(-1, label='', bitmap1=icons.chart.GetBitmap()) - #self.navTB.Realize() - - self.toolbar_sizer = wx.BoxSizer(wx.VERTICAL) - self.toolbar_sizer.Add(self.navTBTop) - self.toolbar_sizer.Add(self.navTBBottom) - - - # --- Tool Panel - self.toolSizer= wx.BoxSizer(wx.VERTICAL) - # --- PlotType Panel - self.pltTypePanel= PlotTypePanel(self); - # --- Plot type specific options - self.spcPanel = SpectralCtrlPanel(self) - self.pdfPanel = PDFCtrlPanel(self) - self.cmpPanel = CompCtrlPanel(self) - self.mmxPanel = MinMaxPanel(self) - # --- Esthetics panel - self.esthPanel = EstheticsPanel(self) - - - # --- Ctrl Panel - self.ctrlPanel= wx.Panel(self) - #self.ctrlPanel.SetBackgroundColour('blue') - # Check Boxes - self.cbCurveType = wx.ComboBox(self.ctrlPanel, choices=['Plain','LS','Markers','Mix'] , style=wx.CB_READONLY) - self.cbCurveType.SetSelection(1) - self.cbSub = wx.CheckBox(self.ctrlPanel, -1, 'Subplot',(10,10)) - self.cbLogX = wx.CheckBox(self.ctrlPanel, -1, 'Log-x',(10,10)) - self.cbLogY = wx.CheckBox(self.ctrlPanel, -1, 'Log-y',(10,10)) - self.cbSync = wx.CheckBox(self.ctrlPanel, -1, 'Sync-x',(10,10)) - self.cbXHair = wx.CheckBox(self.ctrlPanel, -1, 'CrossHair',(10,10)) - self.cbPlotMatrix = wx.CheckBox(self.ctrlPanel, -1, 'Matrix',(10,10)) - self.cbAutoScale = wx.CheckBox(self.ctrlPanel, -1, 'AutoScale',(10,10)) - self.cbGrid = wx.CheckBox(self.ctrlPanel, -1, 'Grid',(10,10)) - self.cbStepPlot = wx.CheckBox(self.ctrlPanel, -1, 'StepPlot',(10,10)) - self.cbMeasure = wx.CheckBox(self.ctrlPanel, -1, 'Measure',(10,10)) - #self.cbSub.SetValue(True) # DEFAULT TO SUB? 
- self.cbSync.SetValue(True) - self.cbXHair.SetValue(True) # Have cross hair by default - self.cbAutoScale.SetValue(True) - # Callbacks - self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbSub ) - self.Bind(wx.EVT_COMBOBOX, self.redraw_event , self.cbCurveType) - self.Bind(wx.EVT_CHECKBOX, self.log_select , self.cbLogX ) - self.Bind(wx.EVT_CHECKBOX, self.log_select , self.cbLogY ) - self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbSync ) - self.Bind(wx.EVT_CHECKBOX, self.crosshair_event , self.cbXHair ) - self.Bind(wx.EVT_CHECKBOX, self.plot_matrix_select, self.cbPlotMatrix ) - self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbAutoScale ) - self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbGrid ) - self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbStepPlot ) - self.Bind(wx.EVT_CHECKBOX, self.measure_select , self.cbMeasure ) - self.Bind(wx.EVT_CHECKBOX, self.measure_select , self.cbMeasure ) - # LAYOUT - cb_sizer = wx.FlexGridSizer(rows=4, cols=3, hgap=0, vgap=0) - cb_sizer.Add(self.cbCurveType , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbSub , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbAutoScale , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbLogX , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbLogY , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbStepPlot , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbXHair , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbGrid , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbSync , 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbPlotMatrix, 0, flag=wx.ALL, border=1) - cb_sizer.Add(self.cbMeasure , 0, flag=wx.ALL, border=1) - - self.ctrlPanel.SetSizer(cb_sizer) - - # --- Crosshair Panel - crossHairPanel= wx.Panel(self) - self.lbCrossHairX = wx.StaticText(crossHairPanel, -1, 'x = ... ') - self.lbCrossHairY = wx.StaticText(crossHairPanel, -1, 'y = ... 
') - self.lbDeltaX = wx.StaticText(crossHairPanel, -1, ' ') - self.lbDeltaY = wx.StaticText(crossHairPanel, -1, ' ') - self.lbCrossHairX.SetFont(getMonoFont(self)) - self.lbCrossHairY.SetFont(getMonoFont(self)) - self.lbDeltaX.SetFont(getMonoFont(self)) - self.lbDeltaY.SetFont(getMonoFont(self)) - cbCH = wx.FlexGridSizer(rows=4, cols=1, hgap=0, vgap=0) - cbCH.Add(self.lbCrossHairX , 0, flag=wx.ALL, border=1) - cbCH.Add(self.lbCrossHairY , 0, flag=wx.ALL, border=1) - cbCH.Add(self.lbDeltaX , 0, flag=wx.ALL, border=1) - cbCH.Add(self.lbDeltaY , 0, flag=wx.ALL, border=1) - crossHairPanel.SetSizer(cbCH) - - # --- layout of panels - row_sizer = wx.BoxSizer(wx.HORIZONTAL) - sl2 = wx.StaticLine(self, -1, size=wx.Size(1,-1), style=wx.LI_VERTICAL) - sl3 = wx.StaticLine(self, -1, size=wx.Size(1,-1), style=wx.LI_VERTICAL) - sl4 = wx.StaticLine(self, -1, size=wx.Size(1,-1), style=wx.LI_VERTICAL) - row_sizer.Add(self.pltTypePanel , 0 , flag=wx.LEFT|wx.RIGHT|wx.CENTER , border=1) - row_sizer.Add(sl2 , 0 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=0) - row_sizer.Add(self.toolbar_sizer, 0 , flag=wx.LEFT|wx.RIGHT|wx.CENTER , border=1) - row_sizer.Add(sl3 , 0 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=0) - row_sizer.Add(self.ctrlPanel , 1 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=0) - row_sizer.Add(sl4 , 0 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=0) - row_sizer.Add(crossHairPanel , 0 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=1) - - plotsizer = wx.BoxSizer(wx.VERTICAL) - self.slCtrl = wx.StaticLine(self, -1, size=wx.Size(-1,1), style=wx.LI_HORIZONTAL) - self.slCtrl.Hide() - self.slEsth = wx.StaticLine(self, -1, size=wx.Size(-1,1), style=wx.LI_HORIZONTAL) - self.slEsth.Hide() - sl1 = wx.StaticLine(self, -1, size=wx.Size(-1,1), style=wx.LI_HORIZONTAL) - plotsizer.Add(self.toolSizer,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) - plotsizer.Add(self.canvas ,1,flag = wx.EXPAND,border = 5 ) - plotsizer.Add(sl1 ,0,flag = wx.EXPAND,border = 0) - plotsizer.Add(self.spcPanel ,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) - plotsizer.Add(self.pdfPanel ,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) - plotsizer.Add(self.cmpPanel ,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) - plotsizer.Add(self.mmxPanel ,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) - plotsizer.Add(self.slEsth ,0,flag = wx.EXPAND,border = 0) - plotsizer.Add(self.esthPanel,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) - plotsizer.Add(self.slCtrl ,0,flag = wx.EXPAND,border = 0) - plotsizer.Add(row_sizer ,0,flag = wx.EXPAND|wx.NORTH ,border = 2) - - self.show_hide(self.spcPanel, self.pltTypePanel.cbFFT.GetValue()) - self.show_hide(self.cmpPanel, self.pltTypePanel.cbCompare.GetValue()) - self.show_hide(self.pdfPanel, self.pltTypePanel.cbPDF.GetValue()) - self.show_hide(self.mmxPanel, self.pltTypePanel.cbMinMax.GetValue()) - - self.SetSizer(plotsizer) - self.plotsizer=plotsizer; - self.set_subplot_spacing(init=True) - - def onEsthToggle(self,event): - self.esthToggle=not self.esthToggle - if self.esthToggle: - self.slCtrl.Show() - self.esthPanel.Show() - else: - self.slCtrl.Hide() - self.esthPanel.Hide() - self.plotsizer.Layout() - event.Skip() - - def set_subplot_spacing(self, init=False): - """ - Handle default subplot spacing - - NOTE: - - Tight fails when the ylabel is too long, especially for fft with multiplt signals - - might need to change depending on window size/resizing - - need to change if right axis needed - - this will 
override the user settings - """ - #self.fig.set_tight_layout(True) # NOTE: works almost fine, but problem with FFT multiple - # TODO this is definitely not generic, but tight fails.. - if init: - # NOTE: at init size is (20,20) because sizer is not initialized yet - bottom = 0.12 - left = 0.12 - else: - if self.Size[1]<300: - bottom=0.20 - elif self.Size[1]<350: - bottom=0.18 - elif self.Size[1]<430: - bottom=0.16 - elif self.Size[1]<600: - bottom=0.13 - elif self.Size[1]<800: - bottom=0.09 - else: - bottom=0.07 - if self.Size[0]<300: - left=0.22 - elif self.Size[0]<450: - left=0.20 - elif self.Size[0]<950: - left=0.12 - else: - left=0.06 - #print(self.Size,'bottom', bottom, 'left',left) - if self.cbPlotMatrix.GetValue(): # TODO detect it - self.fig.subplots_adjust(top=0.97,bottom=bottom,left=left,right=0.98-left) - else: - self.fig.subplots_adjust(top=0.97,bottom=bottom,left=left,right=0.98) - - def plot_matrix_select(self, event): - self.infoPanel.togglePlotMatrix(self.cbPlotMatrix.GetValue()) - self.redraw_same_data() - - def measure_select(self, event): - if self.cbMeasure.IsChecked(): - self.cbAutoScale.SetValue(False) - self.redraw_same_data() - - def redraw_event(self, event): - self.redraw_same_data() - - def log_select(self, event): - if self.pltTypePanel.cbPDF.GetValue(): - self.cbLogX.SetValue(False) - self.cbLogY.SetValue(False) - else: - self.redraw_same_data() - - def crosshair_event(self, event): - try: - self.multiCursors.vertOn =self.cbXHair.GetValue() - self.multiCursors.horizOn=self.cbXHair.GetValue() - self.multiCursors._update() - except: - pass - - def show_hide(self,panel,bShow): - if bShow: - panel.Show() - self.slEsth.Show() - else: - self.slEsth.Hide() - panel.Hide() - - @property - def sharex(self): - return self.cbSync.IsChecked() and (not self.pltTypePanel.cbPDF.GetValue()) - - def set_subplots(self,nPlots): - self.set_subplot_spacing() - # Creating subplots - for ax in self.fig.axes: - self.fig.delaxes(ax) - sharex=None - for i in range(nPlots): - # Vertical stack - if i==0: - ax=self.fig.add_subplot(nPlots,1,i+1) - if self.sharex: - sharex=ax - else: - ax=self.fig.add_subplot(nPlots,1,i+1,sharex=sharex) - # Horizontal stack - #self.fig.add_subplot(1,nPlots,i+1) - - def onMouseMove(self, event): - if event.inaxes: - x, y = event.xdata, event.ydata - self.lbCrossHairX.SetLabel('x =' + self.formatLabelValue(x)) - self.lbCrossHairY.SetLabel('y =' + self.formatLabelValue(y)) - - def onMouseClick(self, event): - self.clickLocation = (event.inaxes, event.xdata, event.ydata) - - def onMouseRelease(self, event): - if self.cbMeasure.GetValue(): - for ax, ax_idx in zip(self.fig.axes, range(len(self.fig.axes))): - if event.inaxes == ax: - x, y = event.xdata, event.ydata - if self.clickLocation != (ax, x, y): - # Ignore measurements for zoom-actions. Possibly add small tolerance. 
- # Zoom-actions disable autoscale - self.cbAutoScale.SetValue(False) - return - if event.button == 1: - self.infoPanel.setMeasurements((x, y), None) - self.leftMeasure.set(ax_idx, x, y) - self.leftMeasure.plot(ax, ax_idx) - elif event.button == 3: - self.infoPanel.setMeasurements(None, (x, y)) - self.rightMeasure.set(ax_idx, x, y) - self.rightMeasure.plot(ax, ax_idx) - else: - return - if self.cbAutoScale.IsChecked() is False: - self._restore_limits() - - if self.leftMeasure.axis_idx == self.rightMeasure.axis_idx and self.leftMeasure.axis_idx != -1: - self.lbDeltaX.SetLabel('dx=' + self.formatLabelValue(self.rightMeasure.x - self.leftMeasure.x)) - self.lbDeltaY.SetLabel('dy=' + self.formatLabelValue(self.rightMeasure.y - self.leftMeasure.y)) - else: - self.lbDeltaX.SetLabel('') - self.lbDeltaY.SetLabel('') - return - - def onDraw(self, event): - self._store_limits() - - def formatLabelValue(self, value): - try: - if abs(value)<1000 and abs(value)>1e-4: - s = '{:10.5f}'.format(value) - else: - s = '{:10.3e}'.format(value) - except TypeError: - s = ' ' - return s - - def removeTools(self,event=None,Layout=True): - try: - self.toolPanel.destroy() # call the "destroy" function which might clean up data - except: - pass - try: - # Python3 - self.toolSizer.Clear(delete_windows=True) # Delete Windows - except: - # Python2 - if hasattr(self,'toolPanel'): - self.toolSizer.Remove(self.toolPanel) - self.toolPanel.Destroy() - del self.toolPanel - self.toolSizer.Clear() # Delete Windows - if Layout: - self.plotsizer.Layout() - - def showTool(self,toolName=''): - from .GUITools import TOOLS - self.Freeze() - self.removeTools(Layout=False) - if toolName in TOOLS.keys(): - self.toolPanel=TOOLS[toolName](self) # calling the panel constructor - else: - raise Exception('Unknown tool {}'.format(toolName)) - self.toolSizer.Add(self.toolPanel, 0, wx.EXPAND|wx.ALL, 5) - self.plotsizer.Layout() - self.Thaw() - - def setPD_PDF(self,PD,c): - """ Convert plot data to PDF data based on GUI options""" - # ---PDF - nBins = self.pdfPanel.scBins.GetValue() - bSmooth = self.pdfPanel.cbSmooth.GetValue() - nBins_out= PD.toPDF(nBins,bSmooth) - if nBins_out!=nBins: - self.pdfPanel.scBins.SetValue(nBins) - - def setPD_MinMax(self,PD): - """ Convert plot data to MinMax data based on GUI options""" - yScale=self.mmxPanel.cbyMinMax.IsChecked() - xScale=self.mmxPanel.cbxMinMax.IsChecked() - try: - PD.toMinMax(xScale,yScale) - except Exception as e: - self.mmxPanel.cbxMinMax.SetValue(False) - raise e # Used to be Warn - - def setPD_FFT(self,pd): - """ Convert plot data to FFT data based on GUI options""" - yType = self.spcPanel.cbType.GetStringSelection() - xType = self.spcPanel.cbTypeX.GetStringSelection() - avgMethod = self.spcPanel.cbAveraging.GetStringSelection() - avgWindow = self.spcPanel.cbAveragingMethod.GetStringSelection() - bDetrend = self.spcPanel.cbDetrend.IsChecked() - nExp = self.spcPanel.scP2.GetValue() - # Convert plotdata to FFT data - try: - Info = pd.toFFT(yType=yType, xType=xType, avgMethod=avgMethod, avgWindow=avgWindow, bDetrend=bDetrend, nExp=nExp) - # Trigger - if hasattr(Info,'nExp') and Info.nExp!=nExp: - self.spcPanel.scP2.SetValue(Info.nExp) - self.spcPanel.updateP2(Info.nExp) - except Exception as e: - self.spcPanel.Hide(); - self.plotsizer.Layout() - raise e - - - def transformPlotData(self,PD): - """" - Apply MinMax, PDF or FFT transform to plot based on GUI data - """ - plotType=self.pltTypePanel.plotType() - if plotType=='MinMax': - self.setPD_MinMax(PD) - elif plotType=='PDF': - 
self.setPD_PDF(PD,PD.c) - elif plotType=='FFT': - self.setPD_FFT(PD) - - def getPlotData(self,plotType): - ID,SameCol,selMode=self.selPanel.getPlotDataSelection() - self.selMode=selMode # we store the selection mode - del self.plotData - self.plotData=[] - tabs=self.selPanel.tabList.getTabs() # TODO, selPanel should just return the PlotData... - try: - for i,idx in enumerate(ID): - # Initialize each plotdata based on selected table and selected id channels - pd=PlotData(); - pd.fromIDs(tabs,i,idx,SameCol, self.plotDataOptions) - # Possible change of data - if plotType=='MinMax': - self.setPD_MinMax(pd) - elif plotType=='PDF': - self.setPD_PDF(pd,pd.c) - elif plotType=='FFT': - self.setPD_FFT(pd) - self.plotData.append(pd) - except Exception as e: - self.plotData=[] - raise e - - def PD_Compare(self,mode): - """ Perform comparison of the selected PlotData, returns new plotData with the comparison. """ - sComp = self.cmpPanel.rbType.GetStringSelection() - try: - self.plotData = compareMultiplePD(self.plotData,mode, sComp) - except Exception as e: - self.pltTypePanel.cbRegular.SetValue(True) - raise e - - def _onPlotMatrixLeftClick(self, event): - """Toggle plot-states from None, to left-axis, to right-axis. - Left-click goes forwards, right-click goes backwards. - IndexError to avoid "holes" in matrix with outer adjacent populated entries - """ - btn = event.GetEventObject() - label = btn.GetLabelText() - if label == '-': - btn.SetLabel('1') - try: - self.infoPanel.getPlotMatrix(self.plotData, self.cbSub.IsChecked()) - except IndexError: - btn.SetLabel('-') - elif label == '1': - btn.SetLabel('2') - else: - btn.SetLabel('-') - try: - self.infoPanel.getPlotMatrix(self.plotData, self.cbSub.IsChecked()) - except IndexError: - btn.SetLabel('1') - self.redraw_same_data() - - def _onPlotMatrixRightClick(self, event): - btn = event.GetEventObject() - label = btn.GetLabelText() - if label == '-': - btn.SetLabel('2') - try: - self.infoPanel.getPlotMatrix(self.plotData, self.cbSub.IsChecked()) - except IndexError: - btn.SetLabel('-') - elif label == '1': - btn.SetLabel('-') - try: - self.infoPanel.getPlotMatrix(self.plotData, self.cbSub.IsChecked()) - except IndexError: - btn.SetLabel('2') - else: - btn.SetLabel('1') - self.redraw_same_data() - - def set_axes_lim(self, PDs, axis): - """ - It's usually faster to set the axis limits first (before plotting) - and disable autoscaling. This way the limits are not recomputed when plot data are added. - Also, we already have computed the min and max, so we leverage that. 
- NOTE: - doesnt not work with strings - doesnt not work for FFT and compare - - INPUTS: - PDs: list of plot data - """ - # TODO option for tight axes - tight=False - - plotType=self.pltTypePanel.plotType() - if plotType in ['FFT','Compare']: - axis.autoscale(True, axis='both', tight=tight) - return - vXString=[PDs[i].xIsString for i in axis.iPD] - vYString=[PDs[i].yIsString for i in axis.iPD] - if not any(vXString) and not self.cbLogX.IsChecked(): - try: - xMin=np.min([PDs[i]._xMin[0] for i in axis.iPD]) - xMax=np.max([PDs[i]._xMax[0] for i in axis.iPD]) - if np.isclose(xMin,xMax): - delta=1 if np.isclose(xMax,0) else 0.1*xMax - else: - if tight: - delta=0 - else: - delta = (xMax-xMin)*pyplot_rc['axes.xmargin'] - axis.set_xlim(xMin-delta,xMax+delta) - axis.autoscale(False, axis='x', tight=False) - except: - pass - if not any(vYString) and not self.cbLogY.IsChecked(): - try: - yMin=np.min([PDs[i]._yMin[0] for i in axis.iPD]) - yMax=np.max([PDs[i]._yMax[0] for i in axis.iPD]) - delta = (yMax-yMin)*pyplot_rc['axes.ymargin'] - if np.isclose(yMin,yMax): - delta=1 if np.isclose(yMax,0) else 0.1*yMax - else: - if tight: - delta=0 - else: - delta = (yMax-yMin)*pyplot_rc['axes.xmargin'] - axis.set_ylim(yMin-delta,yMax+delta) - axis.autoscale(False, axis='y', tight=False) - except: - pass - - def plot_all(self, keep_limits=True): - self.multiCursors=[] - - if self.cbMeasure.GetValue() is False: - for measure in [self.leftMeasure, self.rightMeasure]: - measure.clear() - self.infoPanel.setMeasurements(None, None) - self.lbDeltaX.SetLabel('') - self.lbDeltaY.SetLabel('') - - axes=self.fig.axes - PD=self.plotData - - - # --- Plot options - bStep = self.cbStepPlot.IsChecked() - plot_options = dict() - plot_options['lw']=float(self.esthPanel.cbLW.Value) - plot_options['ms']=float(self.esthPanel.cbMS.Value) - if self.cbCurveType.Value=='Plain': - plot_options['LineStyles'] = ['-'] - plot_options['Markers'] = [''] - elif self.cbCurveType.Value=='LS': - plot_options['LineStyles'] = ['-','--','-.',':'] - plot_options['Markers'] = [''] - elif self.cbCurveType.Value=='Markers': - plot_options['LineStyles'] = [''] - plot_options['Markers'] = ['o','d','v','^','s'] - elif self.cbCurveType.Value=='Mix': # NOTE, can be improved - plot_options['LineStyles'] = ['-','--', '-','-','-'] - plot_options['Markers'] = ['' ,'' ,'o','^','s'] - else: - # Combination of linestyles markers, colors, etc. - # But at that stage, if the user really want this, then we can implement an option to set styles per plot. Not high priority. - raise Exception('Not implemented') - - - - # --- Font options - font_options = dict() - font_options_legd = dict() - font_options['size'] = int(self.esthPanel.cbFont.Value) # affect labels - font_options_legd['fontsize'] = int(self.esthPanel.cbLgdFont.Value) - needChineseFont = any([pd.needChineseFont for pd in PD]) - if needChineseFont and self.specialFont is not None: - font_options['fontproperties']= self.specialFont - font_options_legd['prop'] = self.specialFont - - # --- Loop on axes. 
Either use ax.iPD to chose the plot data, or rely on plotmatrix - for axis_idx, ax_left in enumerate(axes): - ax_right = None - # Checks - vDate=[PD[i].yIsDate for i in ax_left.iPD] - if any(vDate) and len(vDate)>1: - Error(self,'Cannot plot date and other value on the same axis') - return - - # Set limit before plot when possible, for optimization - self.set_axes_lim(PD, ax_left) - - # Actually plot - pm = self.infoPanel.getPlotMatrix(PD, self.cbSub.IsChecked()) - __, bAllNegLeft = self.plotSignals(ax_left, axis_idx, PD, pm, 1, bStep, plot_options) - ax_right, bAllNegRight = self.plotSignals(ax_left, axis_idx, PD, pm, 2, bStep, plot_options) - - self.infoPanel.setMeasurements(self.leftMeasure.get_xydata(), self.rightMeasure.get_xydata()) - for measure in [self.leftMeasure, self.rightMeasure]: - measure.plot(ax_left, axis_idx) - - # Log Axes - if self.cbLogX.IsChecked(): - try: - ax_left.set_xscale("log", nonpositive='clip') # latest - except: - ax_left.set_xscale("log", nonposx='clip') # legacy - - if self.cbLogY.IsChecked(): - if bAllNegLeft is False: - try: - ax_left.set_yscale("log", nonpositive='clip') # latest - except: - ax_left.set_yscale("log", nonposy='clip') - if bAllNegRight is False and ax_right is not None: - try: - ax_right.set_yscale("log", nonpositive='clip') # latest - except: - ax_left.set_yscale("log", nonposy='clip') # legacy - - # XLIM - TODO FFT ONLY NASTY - if self.pltTypePanel.cbFFT.GetValue(): - try: - xlim=float(self.spcPanel.tMaxFreq.GetLineText(0)) - if xlim>0: - ax_left.set_xlim([0,xlim]) - pd=PD[ax_left.iPD[0]] - I=pd.x 0 and len(yleft_labels) <= 3: - ax_left.set_ylabel(' and '.join(yleft_labels), **font_options) - elif ax_left is not None: - ax_left.set_ylabel('') - if len(yright_labels) > 0 and len(yright_labels) <= 3: - ax_right.set_ylabel(' and '.join(yright_labels), **font_options) - elif ax_right is not None: - ax_right.set_ylabel('') - - # Legends - lgdLoc = self.esthPanel.cbLegend.Value.lower() - if (self.pltTypePanel.cbCompare.GetValue() or - ((len(yleft_legends) + len(yright_legends)) > 1)): - if lgdLoc !='none': - if len(yleft_legends) > 0: - ax_left.legend(fancybox=False, loc=lgdLoc, **font_options_legd) - if ax_right is not None and len(yright_legends) > 0: - ax_right.legend(fancybox=False, loc=4, **font_options_legd) - elif len(axes)>1 and len(axes)==len(PD): - # TODO: can this be removed? If there is only one unique signal - # per subplot, normally only ylabel is displayed and no legend. - # Special case when we have subplots and all plots have the same label - if lgdLoc !='none': - usy = unique([pd.sy for pd in PD]) - if len(usy)==1: - for ax in axes: - ax.legend(fancybox=False, loc=lgdLoc, **font_options_legd) - - axes[-1].set_xlabel(PD[axes[-1].iPD[0]].sx, **font_options) - - #print('sy :',[pd.sy for pd in PD]) - #print('syl:',[pd.syl for pd in PD]) - - # --- Cursors for each individual plot - # NOTE: cursors needs to be stored in the object! 
- #for ax_left in self.fig.axes: - # self.cursors.append(MyCursor(ax_left,horizOn=True, vertOn=False, useblit=True, color='gray', linewidth=0.5, linestyle=':')) - # Vertical cusor for all, commonly - bXHair = self.cbXHair.GetValue() - self.multiCursors = MyMultiCursor(self.canvas, tuple(self.fig.axes), useblit=True, horizOn=bXHair, vertOn=bXHair, color='gray', linewidth=0.5, linestyle=':') - - def plotSignals(self, ax, axis_idx, PD, pm, left_right, is_step, opts): - axis = None - bAllNeg = True - if pm is None: - loop_range = ax.iPD - else: - loop_range = range(len(PD)) - - iPlot=-1 - for signal_idx in loop_range: - do_plot = False - if left_right == 1 and (pm is None or pm[signal_idx][axis_idx] == left_right): - do_plot = True - axis = ax - elif left_right == 2 and pm is not None and pm[signal_idx][axis_idx] == left_right: - do_plot = True - if axis is None: - axis = ax.twinx() - ax.set_zorder(axis.get_zorder()+1) - ax.patch.set_visible(False) - axis._get_lines.prop_cycler = ax._get_lines.prop_cycler - pd=PD[signal_idx] - if do_plot: - iPlot+=1 - # --- styling per plot - if len(pd.x)==1: - marker='o'; ls='' - else: - # TODO allow PlotData to override for "per plot" options in the future - marker = opts['Markers'][np.mod(iPlot,len(opts['Markers']))] - ls = opts['LineStyles'][np.mod(iPlot,len(opts['LineStyles']))] - if is_step: - plot = axis.step - else: - plot = axis.plot - plot(pd.x,pd.y,label=pd.syl,ms=opts['ms'], lw=opts['lw'], marker=marker, ls=ls) - try: - bAllNeg = bAllNeg and all(pd.y<=0) - except: - pass # Dates or strings - return axis, bAllNeg - - def findPlotMode(self,PD): - uTabs = unique([pd.it for pd in PD]) - usy = unique([pd.sy for pd in PD]) - uiy = unique([pd.iy for pd in PD]) - if len(uTabs)<=0: - raise Exception('No Table. Contact developer') - if len(uTabs)==1: - mode='1Tab_nCols' - else: - if PD[0].SameCol: - mode='nTabs_SameCols' - else: - # Now that we allow multiple selections detecting "simColumns" is more difficult - if len(uTabs) == len(PD): - mode='nTabs_1Col' - elif self.selMode=='simColumnsMode': - mode='nTabs_SimCols' - else: - mode='nTabs_mCols' - return mode - - def findSubPlots(self,PD,mode): - uTabs = unique([pd.it for pd in PD]) - usy = unique([pd.sy for pd in PD]) - bSubPlots = self.cbSub.IsChecked() - bCompare = self.pltTypePanel.cbCompare.GetValue() # NOTE bCompare somehow always 1Tab_nCols - nSubPlots=1 - spreadBy='none' - self.infoPanel.setTabMode(mode) - if mode=='1Tab_nCols': - if bSubPlots: - if bCompare or len(uTabs)==1: - nSubPlots = self.infoPanel.getNumberOfSubplots(PD, bSubPlots) - else: - nSubPlots=len(usy) - spreadBy='iy' - elif mode=='nTabs_SameCols': - if bSubPlots: - if bCompare: - print('>>>TODO ',mode,len(usy),len(uTabs)) - else: - if len(usy)==1: - # Temporary hack until we have an option for spread by tabs or col - nSubPlots=len(uTabs) - spreadBy='it' - else: - nSubPlots=len(usy) - spreadBy='iy' - elif mode=='nTabs_SimCols': - if bSubPlots: - if bCompare: - print('>>>TODO ',mode,len(usy),len(uTabs)) - else: - nSubPlots=int(len(PD)/len(uTabs)) - spreadBy='mod-ip' - elif mode=='nTabs_mCols': - if bSubPlots: - if bCompare: - print('>>>TODO ',mode,len(usy),len(uTabs)) - else: - if bCompare or len(uTabs)==1: - nSubPlots = self.infoPanel.getNumberOfSubplots(PD, bSubPlots) - else: - nSubPlots=len(PD) - spreadBy='mod-ip' - elif mode=='nTabs_1Col': - if bSubPlots: - if bCompare: - print('>>> TODO',mode,len(uTabs)) - else: - nSubPlots=len(uTabs) - spreadBy='it' - else: - raise Exception('Unknown mode, contact developer.') - return 
nSubPlots,spreadBy - - def distributePlots(self,mode,nSubPlots,spreadBy): - """ Assigns plot data to axes and axes to plot data """ - axes=self.fig.axes - - # Link plot data to axes - if nSubPlots==1 or spreadBy=='none': - axes[0].iPD=[i for i in range(len(self.plotData))] - else: - for ax in axes: - ax.iPD=[] - PD=self.plotData - uTabs=unique([pd.it for pd in PD]) - uiy=unique([pd.iy for pd in PD]) - if spreadBy=='iy': - for ipd,pd in enumerate(PD): - i=uiy.index(pd.iy) - if i < len(axes): - axes[i].iPD.append(ipd) - elif spreadBy=='it': - for ipd,pd in enumerate(PD): - i=uTabs.index(pd.it) - axes[i].iPD.append(ipd) - elif spreadBy=='mod-ip': - for ipd,pd in enumerate(PD): - i=np.mod(ipd, nSubPlots) - axes[i].iPD.append(ipd) - else: - raise Exception('Wrong spreadby value') - - def setLegendLabels(self,mode): - """ Set labels for legend """ - if mode=='1Tab_nCols': - for pd in self.plotData: - if self.pltTypePanel.cbMinMax.GetValue(): - pd.syl = no_unit(pd.sy) - else: - pd.syl = pd.sy - - elif mode=='nTabs_SameCols': - for pd in self.plotData: - pd.syl=pd.st - - elif mode=='nTabs_1Col': - usy=unique([pd.sy for pd in self.plotData]) - if len(usy)==1: - for pd in self.plotData: - pd.syl=pd.st - else: - for pd in self.plotData: - if self.pltTypePanel.cbMinMax.GetValue(): - pd.syl=no_unit(pd.sy) - else: - pd.syl=pd.sy #pd.syl=pd.st + ' - '+pd.sy - elif mode=='nTabs_SimCols': - bSubPlots = self.cbSub.IsChecked() - if bSubPlots: # spread by table name - for pd in self.plotData: - pd.syl=pd.st - else: - for pd in self.plotData: - pd.syl=pd.st + ' - '+pd.sy - elif mode=='nTabs_mCols': - usy=unique([pd.sy for pd in self.plotData]) - bSubPlots = self.cbSub.IsChecked() - if bSubPlots and len(usy)==1: # spread by table name - for pd in self.plotData: - pd.syl=pd.st - else: - for pd in self.plotData: - pd.syl=pd.st + ' - '+pd.sy - else: - raise Exception('Unknown mode {}'.format(mode)) - - - def empty(self): - self.cleanPlot() - - def clean_memory(self): - if hasattr(self,'plotData'): - del self.plotData - self.plotData=[] - for ax in self.fig.axes: - ax.iPD=[] - self.fig.delaxes(ax) - gc.collect() - - def clean_memory_plot(self): - pass - - def cleanPlot(self): - for ax in self.fig.axes: - if hasattr(ax,'iPD'): - del ax.iPD - self.fig.delaxes(ax) - gc.collect() - self.fig.add_subplot(111) - ax = self.fig.axes[0] - ax.set_axis_off() - #ax.plot(1,1) - self.canvas.draw() - gc.collect() - - def load_and_draw(self): - """ Full draw event: - - Get plot data based on selection - - Plot them - - Trigger changes to infoPanel - - """ - self.clean_memory() - self.getPlotData(self.pltTypePanel.plotType()) - if len(self.plotData)==0: - self.cleanPlot(); - return - mode=self.findPlotMode(self.plotData) - if self.pltTypePanel.cbCompare.GetValue(): - self.PD_Compare(mode) - if len(self.plotData)==0: - self.cleanPlot(); - return - self.redraw_same_data() - if self.infoPanel is not None: - self.infoPanel.showStats(self.plotData,self.pltTypePanel.plotType()) - - def redraw_same_data(self, keep_limits=True): - if len(self.plotData)==0: - self.cleanPlot(); - return - elif len(self.plotData) == 1: - if self.plotData[0].xIsString or self.plotData[0].yIsString or self.plotData[0].xIsDate or self.plotData[0].yIsDate: - self.cbAutoScale.SetValue(True) - else: - if len(self.xlim_prev)==0: # Might occur if some date didn't plot before (e.g. 
strings) - self.cbAutoScale.SetValue(True) - elif rectangleOverlap(self.plotData[0]._xMin[0], self.plotData[0]._yMin[0], - self.plotData[0]._xMax[0], self.plotData[0]._yMax[0], - self.xlim_prev[0][0], self.ylim_prev[0][0], - self.xlim_prev[0][1], self.ylim_prev[0][1]): - pass - else: - self.cbAutoScale.SetValue(True) - - mode=self.findPlotMode(self.plotData) - nPlots,spreadBy=self.findSubPlots(self.plotData,mode) - - self.clean_memory_plot() - self.set_subplots(nPlots) - self.distributePlots(mode,nPlots,spreadBy) - - if not self.pltTypePanel.cbCompare.GetValue(): - self.setLegendLabels(mode) - - self.plot_all(keep_limits) - self.canvas.draw() - - - def _store_limits(self): - self.xlim_prev = [] - self.ylim_prev = [] - for ax in self.fig.axes: - self.xlim_prev.append(ax.get_xlim()) - self.ylim_prev.append(ax.get_ylim()) - - def _restore_limits(self): - for ax, xlim, ylim in zip(self.fig.axes, self.xlim_prev, self.ylim_prev): - ax.set_xlim(xlim) - ax.set_ylim(ylim) - - -if __name__ == '__main__': - import pandas as pd; - from Tables import Table,TableList - - app = wx.App(False) - self=wx.Frame(None,-1,"Title") - self.SetSize((800, 600)) - #self.SetBackgroundColour('red') - class FakeSelPanel(wx.Panel): - def __init__(self, parent): - super(FakeSelPanel,self).__init__(parent) - d ={'ColA': np.linspace(0,1,100)+1,'ColB': np.random.normal(0,1,100)+0,'ColC':np.random.normal(0,1,100)+1} - df = pd.DataFrame(data=d) - self.tabList=TableList([Table(data=df)]) - - def getPlotDataSelection(self): - ID=[] - ID.append([0,0,2,'x','ColB','tab']) - ID.append([0,0,3,'x','ColC','tab']) - return ID,True - - selpanel=FakeSelPanel(self) - # selpanel.SetBackgroundColour('blue') - p1=PlotPanel(self,selpanel) - p1.load_and_draw() - #p1=SpectralCtrlPanel(self) - sizer = wx.BoxSizer(wx.VERTICAL) - sizer.Add(selpanel,0, flag = wx.EXPAND|wx.ALL,border = 10) - sizer.Add(p1,1, flag = wx.EXPAND|wx.ALL,border = 10) - self.SetSizer(sizer) - - self.Center() - self.Layout() - self.SetSize((800, 600)) - self.Show() - self.SendSizeEvent() - - #p1.showStats(None,[tab],[0],[0,1],tab.columns,0,erase=False) - - app.MainLoop() - - +import os +import numpy as np +import wx +import wx.lib.buttons as buttons +import dateutil # required by matplotlib +#from matplotlib import pyplot as plt +import matplotlib +matplotlib.use('wxAgg') # Important for Windows version of installer. NOTE: changed from Agg to wxAgg +from matplotlib import rc as matplotlib_rc +try: + from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas +except Exception as e: + print('') + print('Error: problem importing `matplotlib.backends.backend_wx`.') + import platform + if platform.system()=='Darwin': + print('') + print('pyDatView help:') + print(' This is a typical issue on MacOS, most likely you are') + print(' using the native MacOS python with the native matplolib') + print(' library, which is incompatible with `wxPython`.') + print('') + print(' You can solve this by either:') + print(' - using python3, and pip3 e.g. 
installing it with brew') + print(' - using a virtual environment with python 2 or 3') + print(' - using anaconda with python 2 or 3'); + print('') + import sys + sys.exit(1) + else: + raise e +from matplotlib.figure import Figure +from matplotlib.pyplot import rcParams as pyplot_rc +from matplotlib import font_manager +from pandas.plotting import register_matplotlib_converters + +import gc + +from .common import * # unique, CHAR +from .plotdata import PlotData, compareMultiplePD +from .GUICommon import * +from .GUIToolBox import MyMultiCursor, MyNavigationToolbar2Wx, TBAddTool, TBAddCheckTool +from .GUIMeasure import GUIMeasure +from . import icons + +font = {'size' : 8} +matplotlib_rc('font', **font) +pyplot_rc['agg.path.chunksize'] = 20000 + + +class PDFCtrlPanel(wx.Panel): + def __init__(self, parent): + super(PDFCtrlPanel,self).__init__(parent) + self.parent = parent + lb = wx.StaticText( self, -1, 'Number of bins:') + self.scBins = wx.SpinCtrl(self, value='51',size=wx.Size(70,-1), style=wx.TE_RIGHT) + self.scBins.SetRange(3, 10000) + self.cbSmooth = wx.CheckBox(self, -1, 'Smooth',(10,10)) + self.cbSmooth.SetValue(False) + dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) + dummy_sizer.Add(lb ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(self.scBins ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(self.cbSmooth ,0, flag = wx.CENTER|wx.LEFT,border = 6) + self.SetSizer(dummy_sizer) + self.Bind(wx.EVT_TEXT , self.onPDFOptionChange, self.scBins) + self.Bind(wx.EVT_CHECKBOX, self.onPDFOptionChange) + self.Hide() + + def onPDFOptionChange(self,event=None): + self.parent.load_and_draw(); # DATA HAS CHANGED + +class MinMaxPanel(wx.Panel): + def __init__(self, parent): + super(MinMaxPanel,self).__init__(parent) + self.parent = parent + self.cbxMinMax = wx.CheckBox(self, -1, 'xMinMax',(10,10)) + self.cbyMinMax = wx.CheckBox(self, -1, 'yMinMax',(10,10)) + self.cbxMinMax.SetValue(False) + self.cbyMinMax.SetValue(True) + dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) + dummy_sizer.Add(self.cbxMinMax ,0, flag=wx.CENTER|wx.LEFT, border = 1) + dummy_sizer.Add(self.cbyMinMax ,0, flag=wx.CENTER|wx.LEFT, border = 1) + self.SetSizer(dummy_sizer) + self.Bind(wx.EVT_CHECKBOX, self.onMinMaxChange) + self.Hide() + + def onMinMaxChange(self,event=None): + self.parent.load_and_draw(); # DATA HAS CHANGED + +class CompCtrlPanel(wx.Panel): + def __init__(self, parent): + super(CompCtrlPanel,self).__init__(parent) + self.parent = parent + lblList = ['Relative', '|Relative|','Ratio','Absolute','Y-Y'] + self.rbType = wx.RadioBox(self, label = 'Type', choices = lblList, + majorDimension = 1, style = wx.RA_SPECIFY_ROWS) + dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) + dummy_sizer.Add(self.rbType ,0, flag = wx.CENTER|wx.LEFT,border = 1) + self.SetSizer(dummy_sizer) + self.rbType.Bind(wx.EVT_RADIOBOX,self.onTypeChange) + self.Hide() + + def onTypeChange(self,e): + self.parent.load_and_draw(); # DATA HAS CHANGED + + +class SpectralCtrlPanel(wx.Panel): + def __init__(self, parent): + super(SpectralCtrlPanel,self).__init__(parent) + self.parent = parent + # --- GUI widgets + lb = wx.StaticText( self, -1, 'Type:') + self.cbType = wx.ComboBox(self, choices=['PSD','f x PSD','Amplitude'] , style=wx.CB_READONLY) + self.cbType.SetSelection(0) + lbAveraging = wx.StaticText( self, -1, 'Avg.:') + self.cbAveraging = wx.ComboBox(self, choices=['None','Welch'] , style=wx.CB_READONLY) + self.cbAveraging.SetSelection(1) + self.lbAveragingMethod = wx.StaticText( self, -1, 'Window:') + self.cbAveragingMethod = wx.ComboBox(self, 
choices=['Hamming','Hann','Rectangular'] , style=wx.CB_READONLY) + self.cbAveragingMethod.SetSelection(0) + self.lbP2 = wx.StaticText( self, -1, '2^n:') + self.scP2 = wx.SpinCtrl(self, value='11',size=wx.Size(40,-1)) + self.lbWinLength = wx.StaticText( self, -1, '(2048) ') + self.scP2.SetRange(3, 19) + lbMaxFreq = wx.StaticText( self, -1, 'Xlim:') + self.tMaxFreq = wx.TextCtrl(self,size = (30,-1),style=wx.TE_PROCESS_ENTER) + self.tMaxFreq.SetValue("-1") + self.cbDetrend = wx.CheckBox(self, -1, 'Detrend',(10,10)) + lbX = wx.StaticText( self, -1, 'x:') + self.cbTypeX = wx.ComboBox(self, choices=['1/x','2pi/x','x'] , style=wx.CB_READONLY) + self.cbTypeX.SetSelection(0) + # Layout + dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) + dummy_sizer.Add(lb ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(self.cbType ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(lbAveraging ,0, flag = wx.CENTER|wx.LEFT,border = 6) + dummy_sizer.Add(self.cbAveraging ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(self.lbAveragingMethod,0, flag = wx.CENTER|wx.LEFT,border = 6) + dummy_sizer.Add(self.cbAveragingMethod,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(self.lbP2 ,0, flag = wx.CENTER|wx.LEFT,border = 6) + dummy_sizer.Add(self.scP2 ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(self.lbWinLength ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(lbMaxFreq ,0, flag = wx.CENTER|wx.LEFT,border = 6) + dummy_sizer.Add(self.tMaxFreq ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(lbX ,0, flag = wx.CENTER|wx.LEFT,border = 6) + dummy_sizer.Add(self.cbTypeX ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(self.cbDetrend ,0, flag = wx.CENTER|wx.LEFT,border = 7) + self.SetSizer(dummy_sizer) + self.Bind(wx.EVT_COMBOBOX ,self.onSpecCtrlChange) + self.Bind(wx.EVT_TEXT ,self.onP2ChangeText ,self.scP2 ) + self.Bind(wx.EVT_TEXT_ENTER,self.onXlimChange ,self.tMaxFreq ) + self.Bind(wx.EVT_CHECKBOX ,self.onDetrendChange ,self.cbDetrend) + self.Hide() + + def onXlimChange(self,event=None): + self.parent.redraw_same_data(); + def onSpecCtrlChange(self,event=None): + self.parent.load_and_draw() # Data changes + def onDetrendChange(self,event=None): + self.parent.load_and_draw() # Data changes + + def onP2ChangeText(self,event=None): + nExp=self.scP2.GetValue() + self.updateP2(nExp) + self.parent.load_and_draw() # Data changes + + def updateP2(self,P2): + self.lbWinLength.SetLabel("({})".format(2**P2)) + + + + +class PlotTypePanel(wx.Panel): + def __init__(self, parent): + # Superclass constructor + super(PlotTypePanel,self).__init__(parent) + #self.SetBackgroundColour('yellow') + # data + self.parent = parent + # --- Ctrl Panel + self.cbRegular = wx.RadioButton(self, -1, 'Regular',style=wx.RB_GROUP) + self.cbPDF = wx.RadioButton(self, -1, 'PDF' , ) + self.cbFFT = wx.RadioButton(self, -1, 'FFT' , ) + self.cbMinMax = wx.RadioButton(self, -1, 'MinMax' , ) + self.cbCompare = wx.RadioButton(self, -1, 'Compare', ) + self.cbRegular.SetValue(True) + self.Bind(wx.EVT_RADIOBUTTON, self.pdf_select , self.cbPDF ) + self.Bind(wx.EVT_RADIOBUTTON, self.fft_select , self.cbFFT ) + self.Bind(wx.EVT_RADIOBUTTON, self.minmax_select , self.cbMinMax ) + self.Bind(wx.EVT_RADIOBUTTON, self.compare_select, self.cbCompare) + self.Bind(wx.EVT_RADIOBUTTON, self.regular_select, self.cbRegular) + # LAYOUT + cb_sizer = wx.FlexGridSizer(rows=5, cols=1, hgap=0, vgap=0) + cb_sizer.Add(self.cbRegular , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbPDF , 0, flag=wx.ALL, border=1) + 
cb_sizer.Add(self.cbFFT , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbMinMax , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbCompare , 0, flag=wx.ALL, border=1) + self.SetSizer(cb_sizer) + + def plotType(self): + plotType='Regular' + if self.cbMinMax.GetValue(): + plotType='MinMax' + elif self.cbPDF.GetValue(): + plotType='PDF' + elif self.cbFFT.GetValue(): + plotType='FFT' + elif self.cbCompare.GetValue(): + plotType='Compare' + return plotType + + def regular_select(self, event=None): + self.clear_measures() + self.parent.cbLogY.SetValue(False) + # + self.parent.spcPanel.Hide(); + self.parent.pdfPanel.Hide(); + self.parent.cmpPanel.Hide(); + self.parent.mmxPanel.Hide(); + self.parent.slEsth.Hide(); + self.parent.plotsizer.Layout() + # + self.parent.load_and_draw() # Data changes + + def compare_select(self, event=None): + self.clear_measures() + self.parent.cbLogY.SetValue(False) + self.parent.show_hide(self.parent.cmpPanel, self.cbCompare.GetValue()) + self.parent.spcPanel.Hide(); + self.parent.pdfPanel.Hide(); + self.parent.mmxPanel.Hide(); + self.parent.plotsizer.Layout() + self.parent.load_and_draw() # Data changes + + def fft_select(self, event=None): + self.clear_measures() + self.parent.show_hide(self.parent.spcPanel, self.cbFFT.GetValue()) + self.parent.cbLogY.SetValue(self.cbFFT.GetValue()) + self.parent.pdfPanel.Hide(); + self.parent.mmxPanel.Hide(); + self.parent.plotsizer.Layout() + self.parent.load_and_draw() # Data changes + + def pdf_select(self, event=None): + self.clear_measures() + self.parent.cbLogX.SetValue(False) + self.parent.cbLogY.SetValue(False) + self.parent.show_hide(self.parent.pdfPanel, self.cbPDF.GetValue()) + self.parent.spcPanel.Hide(); + self.parent.cmpPanel.Hide(); + self.parent.mmxPanel.Hide(); + self.parent.plotsizer.Layout() + self.parent.load_and_draw() # Data changes + + def minmax_select(self, event): + self.clear_measures() + self.parent.cbLogY.SetValue(False) + self.parent.show_hide(self.parent.mmxPanel, self.cbMinMax.GetValue()) + self.parent.spcPanel.Hide(); + self.parent.pdfPanel.Hide(); + self.parent.cmpPanel.Hide(); + self.parent.plotsizer.Layout() + self.parent.load_and_draw() # Data changes + + def clear_measures(self): + self.parent.rightMeasure.clear() + self.parent.leftMeasure.clear() + self.parent.lbDeltaX.SetLabel('') + self.parent.lbDeltaY.SetLabel('') + +class EstheticsPanel(wx.Panel): + def __init__(self, parent): + wx.Panel.__init__(self, parent) + self.parent=parent + #self.SetBackgroundColour('red') + + lbFont = wx.StaticText( self, -1, 'Font:') + self.cbFont = wx.ComboBox(self, choices=['6','7','8','9','10','11','12','13','14','15','16','17','18'] , style=wx.CB_READONLY) + self.cbFont.SetSelection(2) + # NOTE: we don't offer "best" since best is slow + lbLegend = wx.StaticText( self, -1, 'Legend:') + self.cbLegend = wx.ComboBox(self, choices=['None','Upper right','Upper left','Lower left','Lower right','Right','Center left','Center right','Lower center','Upper center','Center'] , style=wx.CB_READONLY) + self.cbLegend.SetSelection(1) + lbLgdFont = wx.StaticText( self, -1, 'Legend font:') + self.cbLgdFont = wx.ComboBox(self, choices=['6','7','8','9','10','11','12','13','14','15','16','17','18'] , style=wx.CB_READONLY) + self.cbLgdFont.SetSelection(2) + lbLW = wx.StaticText( self, -1, 'Line width:') + self.cbLW = wx.ComboBox(self, choices=['0.5','1.0','1.5','2.0','2.5','3.0'] , style=wx.CB_READONLY) + self.cbLW.SetSelection(2) + lbMS = wx.StaticText( self, -1, 'Marker size:') + self.cbMS= wx.ComboBox(self, 
choices=['0.5','1','2','3','4','5','6','7','8'] , style=wx.CB_READONLY) + self.cbMS.SetSelection(2) + + # Layout + #dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) + dummy_sizer = wx.WrapSizer(orient=wx.HORIZONTAL) + dummy_sizer.Add(lbFont ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(self.cbFont ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(lbLW ,0, flag = wx.CENTER|wx.LEFT,border = 5) + dummy_sizer.Add(self.cbLW ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(lbMS ,0, flag = wx.CENTER|wx.LEFT,border = 5) + dummy_sizer.Add(self.cbMS ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(lbLegend ,0, flag = wx.CENTER|wx.LEFT,border = 5) + dummy_sizer.Add(self.cbLegend ,0, flag = wx.CENTER|wx.LEFT,border = 1) + dummy_sizer.Add(lbLgdFont ,0, flag = wx.CENTER|wx.LEFT,border = 5) + dummy_sizer.Add(self.cbLgdFont ,0, flag = wx.CENTER|wx.LEFT,border = 1) + self.SetSizer(dummy_sizer) + self.Hide() + # Callbacks + self.Bind(wx.EVT_COMBOBOX ,self.onAnyEsthOptionChange) + self.cbFont.Bind(wx.EVT_COMBOBOX ,self.onFontOptionChange) + + def onAnyEsthOptionChange(self,event=None): + self.parent.redraw_same_data() + + def onFontOptionChange(self,event=None): + matplotlib_rc('font', **{'size':int(self.cbFont.Value) }) # affect all (including ticks) + self.onAnyEsthOptionChange() + + +class PlotPanel(wx.Panel): + def __init__(self, parent, selPanel,infoPanel=None, mainframe=None): + + # Superclass constructor + super(PlotPanel,self).__init__(parent) + + # Font handling + font = parent.GetFont() + font.SetPointSize(font.GetPointSize()-1) + self.SetFont(font) + # Preparing a special font manager for chinese characters + self.specialFont=None + try: + pyplot_path = matplotlib.get_data_path() + except: + pyplot_path = pyplot_rc['datapath'] + CH_F_PATHS = [ + os.path.join(pyplot_path, 'fonts/ttf/SimHei.ttf'), + os.path.join(os.path.dirname(__file__),'../SimHei.ttf')] + for fpath in CH_F_PATHS: + if os.path.exists(fpath): + fontP = font_manager.FontProperties(fname=fpath) + fontP.set_size(font.GetPointSize()) + self.specialFont=fontP + break + # data + self.selPanel = selPanel # <<< dependency with selPanel should be minimum + self.selMode = '' + self.infoPanel=infoPanel + self.infoPanel.setPlotMatrixCallbacks(self._onPlotMatrixLeftClick, self._onPlotMatrixRightClick) + self.parent = parent + self.mainframe= mainframe + self.plotData = [] + self.plotDataOptions=dict() + if self.selPanel is not None: + bg=self.selPanel.BackgroundColour + self.SetBackgroundColour(bg) # sowhow, our parent has a wrong color + #self.SetBackgroundColour('red') + self.leftMeasure = GUIMeasure(1, 'firebrick') + self.rightMeasure = GUIMeasure(2, 'darkgreen') + self.xlim_prev = [[0, 1]] + self.ylim_prev = [[0, 1]] + # GUI + self.fig = Figure(facecolor="white", figsize=(1, 1)) + register_matplotlib_converters() + self.canvas = FigureCanvas(self, -1, self.fig) + self.canvas.mpl_connect('motion_notify_event', self.onMouseMove) + self.canvas.mpl_connect('button_press_event', self.onMouseClick) + self.canvas.mpl_connect('button_release_event', self.onMouseRelease) + self.canvas.mpl_connect('draw_event', self.onDraw) + self.clickLocation = (None, 0, 0) + + self.navTBTop = MyNavigationToolbar2Wx(self.canvas, ['Home', 'Pan']) + self.navTBBottom = MyNavigationToolbar2Wx(self.canvas, ['Subplots', 'Save']) + TBAddCheckTool(self.navTBBottom,'', icons.chart.GetBitmap(), self.onEsthToggle) + self.esthToggle=False + + self.navTBBottom.Realize() + + #self.navTB = wx.ToolBar(self, 
style=wx.TB_HORIZONTAL|wx.TB_HORZ_LAYOUT|wx.TB_NODIVIDER|wx.TB_FLAT) + #self.navTB.SetMargins(0,0) + #self.navTB.SetToolPacking(0) + #self.navTB.AddCheckTool(-1, label='', bitmap1=icons.chart.GetBitmap()) + #self.navTB.Realize() + + self.toolbar_sizer = wx.BoxSizer(wx.VERTICAL) + self.toolbar_sizer.Add(self.navTBTop) + self.toolbar_sizer.Add(self.navTBBottom) + + + # --- Tool Panel + self.toolSizer= wx.BoxSizer(wx.VERTICAL) + # --- PlotType Panel + self.pltTypePanel= PlotTypePanel(self); + # --- Plot type specific options + self.spcPanel = SpectralCtrlPanel(self) + self.pdfPanel = PDFCtrlPanel(self) + self.cmpPanel = CompCtrlPanel(self) + self.mmxPanel = MinMaxPanel(self) + # --- Esthetics panel + self.esthPanel = EstheticsPanel(self) + + + # --- Ctrl Panel + self.ctrlPanel= wx.Panel(self) + #self.ctrlPanel.SetBackgroundColour('blue') + # Check Boxes + self.cbCurveType = wx.ComboBox(self.ctrlPanel, choices=['Plain','LS','Markers','Mix'] , style=wx.CB_READONLY) + self.cbCurveType.SetSelection(1) + self.cbSub = wx.CheckBox(self.ctrlPanel, -1, 'Subplot',(10,10)) + self.cbLogX = wx.CheckBox(self.ctrlPanel, -1, 'Log-x',(10,10)) + self.cbLogY = wx.CheckBox(self.ctrlPanel, -1, 'Log-y',(10,10)) + self.cbSync = wx.CheckBox(self.ctrlPanel, -1, 'Sync-x',(10,10)) + self.cbXHair = wx.CheckBox(self.ctrlPanel, -1, 'CrossHair',(10,10)) + self.cbPlotMatrix = wx.CheckBox(self.ctrlPanel, -1, 'Matrix',(10,10)) + self.cbAutoScale = wx.CheckBox(self.ctrlPanel, -1, 'AutoScale',(10,10)) + self.cbGrid = wx.CheckBox(self.ctrlPanel, -1, 'Grid',(10,10)) + self.cbStepPlot = wx.CheckBox(self.ctrlPanel, -1, 'StepPlot',(10,10)) + self.cbMeasure = wx.CheckBox(self.ctrlPanel, -1, 'Measure',(10,10)) + #self.cbSub.SetValue(True) # DEFAULT TO SUB? + self.cbSync.SetValue(True) + self.cbXHair.SetValue(True) # Have cross hair by default + self.cbAutoScale.SetValue(True) + # Callbacks + self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbSub ) + self.Bind(wx.EVT_COMBOBOX, self.redraw_event , self.cbCurveType) + self.Bind(wx.EVT_CHECKBOX, self.log_select , self.cbLogX ) + self.Bind(wx.EVT_CHECKBOX, self.log_select , self.cbLogY ) + self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbSync ) + self.Bind(wx.EVT_CHECKBOX, self.crosshair_event , self.cbXHair ) + self.Bind(wx.EVT_CHECKBOX, self.plot_matrix_select, self.cbPlotMatrix ) + self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbAutoScale ) + self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbGrid ) + self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbStepPlot ) + self.Bind(wx.EVT_CHECKBOX, self.measure_select , self.cbMeasure ) + self.Bind(wx.EVT_CHECKBOX, self.measure_select , self.cbMeasure ) + # LAYOUT + cb_sizer = wx.FlexGridSizer(rows=4, cols=3, hgap=0, vgap=0) + cb_sizer.Add(self.cbCurveType , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbSub , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbAutoScale , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbLogX , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbLogY , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbStepPlot , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbXHair , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbGrid , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbSync , 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbPlotMatrix, 0, flag=wx.ALL, border=1) + cb_sizer.Add(self.cbMeasure , 0, flag=wx.ALL, border=1) + + self.ctrlPanel.SetSizer(cb_sizer) + + # --- Crosshair Panel + crossHairPanel= wx.Panel(self) + self.lbCrossHairX = wx.StaticText(crossHairPanel, -1, 'x = ... 
') + self.lbCrossHairY = wx.StaticText(crossHairPanel, -1, 'y = ... ') + self.lbDeltaX = wx.StaticText(crossHairPanel, -1, ' ') + self.lbDeltaY = wx.StaticText(crossHairPanel, -1, ' ') + self.lbCrossHairX.SetFont(getMonoFont(self)) + self.lbCrossHairY.SetFont(getMonoFont(self)) + self.lbDeltaX.SetFont(getMonoFont(self)) + self.lbDeltaY.SetFont(getMonoFont(self)) + cbCH = wx.FlexGridSizer(rows=4, cols=1, hgap=0, vgap=0) + cbCH.Add(self.lbCrossHairX , 0, flag=wx.ALL, border=1) + cbCH.Add(self.lbCrossHairY , 0, flag=wx.ALL, border=1) + cbCH.Add(self.lbDeltaX , 0, flag=wx.ALL, border=1) + cbCH.Add(self.lbDeltaY , 0, flag=wx.ALL, border=1) + crossHairPanel.SetSizer(cbCH) + + # --- layout of panels + row_sizer = wx.BoxSizer(wx.HORIZONTAL) + sl2 = wx.StaticLine(self, -1, size=wx.Size(1,-1), style=wx.LI_VERTICAL) + sl3 = wx.StaticLine(self, -1, size=wx.Size(1,-1), style=wx.LI_VERTICAL) + sl4 = wx.StaticLine(self, -1, size=wx.Size(1,-1), style=wx.LI_VERTICAL) + row_sizer.Add(self.pltTypePanel , 0 , flag=wx.LEFT|wx.RIGHT|wx.CENTER , border=1) + row_sizer.Add(sl2 , 0 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=0) + row_sizer.Add(self.toolbar_sizer, 0 , flag=wx.LEFT|wx.RIGHT|wx.CENTER , border=1) + row_sizer.Add(sl3 , 0 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=0) + row_sizer.Add(self.ctrlPanel , 1 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=0) + row_sizer.Add(sl4 , 0 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=0) + row_sizer.Add(crossHairPanel , 0 , flag=wx.LEFT|wx.RIGHT|wx.EXPAND|wx.CENTER, border=1) + + plotsizer = wx.BoxSizer(wx.VERTICAL) + self.slCtrl = wx.StaticLine(self, -1, size=wx.Size(-1,1), style=wx.LI_HORIZONTAL) + self.slCtrl.Hide() + self.slEsth = wx.StaticLine(self, -1, size=wx.Size(-1,1), style=wx.LI_HORIZONTAL) + self.slEsth.Hide() + sl1 = wx.StaticLine(self, -1, size=wx.Size(-1,1), style=wx.LI_HORIZONTAL) + plotsizer.Add(self.toolSizer,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) + plotsizer.Add(self.canvas ,1,flag = wx.EXPAND,border = 5 ) + plotsizer.Add(sl1 ,0,flag = wx.EXPAND,border = 0) + plotsizer.Add(self.spcPanel ,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) + plotsizer.Add(self.pdfPanel ,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) + plotsizer.Add(self.cmpPanel ,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) + plotsizer.Add(self.mmxPanel ,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) + plotsizer.Add(self.slEsth ,0,flag = wx.EXPAND,border = 0) + plotsizer.Add(self.esthPanel,0,flag = wx.EXPAND|wx.CENTER|wx.TOP|wx.BOTTOM,border = 10) + plotsizer.Add(self.slCtrl ,0,flag = wx.EXPAND,border = 0) + plotsizer.Add(row_sizer ,0,flag = wx.EXPAND|wx.NORTH ,border = 2) + + self.show_hide(self.spcPanel, self.pltTypePanel.cbFFT.GetValue()) + self.show_hide(self.cmpPanel, self.pltTypePanel.cbCompare.GetValue()) + self.show_hide(self.pdfPanel, self.pltTypePanel.cbPDF.GetValue()) + self.show_hide(self.mmxPanel, self.pltTypePanel.cbMinMax.GetValue()) + + self.SetSizer(plotsizer) + self.plotsizer=plotsizer; + self.set_subplot_spacing(init=True) + + def onEsthToggle(self,event): + self.esthToggle=not self.esthToggle + if self.esthToggle: + self.slCtrl.Show() + self.esthPanel.Show() + else: + self.slCtrl.Hide() + self.esthPanel.Hide() + self.plotsizer.Layout() + event.Skip() + + def set_subplot_spacing(self, init=False): + """ + Handle default subplot spacing + + NOTE: + - Tight fails when the ylabel is too long, especially for fft with multiplt signals + - might need to change depending on window 
size/resizing + - need to change if right axis needed + - this will override the user settings + """ + #self.fig.set_tight_layout(True) # NOTE: works almost fine, but problem with FFT multiple + # TODO this is definitely not generic, but tight fails.. + if init: + # NOTE: at init size is (20,20) because sizer is not initialized yet + bottom = 0.12 + left = 0.12 + else: + if self.Size[1]<300: + bottom=0.20 + elif self.Size[1]<350: + bottom=0.18 + elif self.Size[1]<430: + bottom=0.16 + elif self.Size[1]<600: + bottom=0.13 + elif self.Size[1]<800: + bottom=0.09 + else: + bottom=0.07 + if self.Size[0]<300: + left=0.22 + elif self.Size[0]<450: + left=0.20 + elif self.Size[0]<950: + left=0.12 + else: + left=0.06 + #print(self.Size,'bottom', bottom, 'left',left) + if self.cbPlotMatrix.GetValue(): # TODO detect it + self.fig.subplots_adjust(top=0.97,bottom=bottom,left=left,right=0.98-left) + else: + self.fig.subplots_adjust(top=0.97,bottom=bottom,left=left,right=0.98) + + def plot_matrix_select(self, event): + self.infoPanel.togglePlotMatrix(self.cbPlotMatrix.GetValue()) + self.redraw_same_data() + + def measure_select(self, event): + if self.cbMeasure.IsChecked(): + self.cbAutoScale.SetValue(False) + self.redraw_same_data() + + def redraw_event(self, event): + self.redraw_same_data() + + def log_select(self, event): + if self.pltTypePanel.cbPDF.GetValue(): + self.cbLogX.SetValue(False) + self.cbLogY.SetValue(False) + else: + self.redraw_same_data() + + def crosshair_event(self, event): + try: + self.multiCursors.vertOn =self.cbXHair.GetValue() + self.multiCursors.horizOn=self.cbXHair.GetValue() + self.multiCursors._update() + except: + pass + + def show_hide(self,panel,bShow): + if bShow: + panel.Show() + self.slEsth.Show() + else: + self.slEsth.Hide() + panel.Hide() + + @property + def sharex(self): + return self.cbSync.IsChecked() and (not self.pltTypePanel.cbPDF.GetValue()) + + def set_subplots(self,nPlots): + self.set_subplot_spacing() + # Creating subplots + for ax in self.fig.axes: + self.fig.delaxes(ax) + sharex=None + for i in range(nPlots): + # Vertical stack + if i==0: + ax=self.fig.add_subplot(nPlots,1,i+1) + if self.sharex: + sharex=ax + else: + ax=self.fig.add_subplot(nPlots,1,i+1,sharex=sharex) + # Horizontal stack + #self.fig.add_subplot(1,nPlots,i+1) + + def onMouseMove(self, event): + if event.inaxes: + x, y = event.xdata, event.ydata + self.lbCrossHairX.SetLabel('x =' + self.formatLabelValue(x)) + self.lbCrossHairY.SetLabel('y =' + self.formatLabelValue(y)) + + def onMouseClick(self, event): + self.clickLocation = (event.inaxes, event.xdata, event.ydata) + + def onMouseRelease(self, event): + if self.cbMeasure.GetValue(): + for ax, ax_idx in zip(self.fig.axes, range(len(self.fig.axes))): + if event.inaxes == ax: + x, y = event.xdata, event.ydata + if self.clickLocation != (ax, x, y): + # Ignore measurements for zoom-actions. Possibly add small tolerance. 
+ # Zoom-actions disable autoscale + self.cbAutoScale.SetValue(False) + return + if event.button == 1: + self.infoPanel.setMeasurements((x, y), None) + self.leftMeasure.set(ax_idx, x, y) + self.leftMeasure.plot(ax, ax_idx) + elif event.button == 3: + self.infoPanel.setMeasurements(None, (x, y)) + self.rightMeasure.set(ax_idx, x, y) + self.rightMeasure.plot(ax, ax_idx) + else: + return + if self.cbAutoScale.IsChecked() is False: + self._restore_limits() + + if self.leftMeasure.axis_idx == self.rightMeasure.axis_idx and self.leftMeasure.axis_idx != -1: + self.lbDeltaX.SetLabel('dx=' + self.formatLabelValue(self.rightMeasure.x - self.leftMeasure.x)) + self.lbDeltaY.SetLabel('dy=' + self.formatLabelValue(self.rightMeasure.y - self.leftMeasure.y)) + else: + self.lbDeltaX.SetLabel('') + self.lbDeltaY.SetLabel('') + return + + def onDraw(self, event): + self._store_limits() + + def formatLabelValue(self, value): + try: + if abs(value)<1000 and abs(value)>1e-4: + s = '{:10.5f}'.format(value) + else: + s = '{:10.3e}'.format(value) + except TypeError: + s = ' ' + return s + + def removeTools(self,event=None,Layout=True): + try: + self.toolPanel.destroy() # call the "destroy" function which might clean up data + except: + pass + try: + # Python3 + self.toolSizer.Clear(delete_windows=True) # Delete Windows + except: + # Python2 + if hasattr(self,'toolPanel'): + self.toolSizer.Remove(self.toolPanel) + self.toolPanel.Destroy() + del self.toolPanel + self.toolSizer.Clear() # Delete Windows + if Layout: + self.plotsizer.Layout() + + def showTool(self,toolName=''): + from .GUITools import TOOLS + if toolName in TOOLS.keys(): + self.showToolPanel(TOOLS[toolName]) + else: + raise Exception('Unknown tool {}'.format(toolName)) + + def showToolPanel(self, panelClass): + """ Show a tool panel based on a panel class (should inherit from GUIToolPanel)""" + from .GUITools import TOOLS + self.Freeze() + self.removeTools(Layout=False) + self.toolPanel=panelClass(parent=self) # calling the panel constructor + self.toolSizer.Add(self.toolPanel, 0, wx.EXPAND|wx.ALL, 5) + self.plotsizer.Layout() + self.Thaw() + + + def setPD_PDF(self,PD,c): + """ Convert plot data to PDF data based on GUI options""" + # ---PDF + nBins = self.pdfPanel.scBins.GetValue() + bSmooth = self.pdfPanel.cbSmooth.GetValue() + nBins_out= PD.toPDF(nBins,bSmooth) + if nBins_out!=nBins: + self.pdfPanel.scBins.SetValue(nBins) + + def setPD_MinMax(self,PD): + """ Convert plot data to MinMax data based on GUI options""" + yScale=self.mmxPanel.cbyMinMax.IsChecked() + xScale=self.mmxPanel.cbxMinMax.IsChecked() + try: + PD.toMinMax(xScale,yScale) + except Exception as e: + self.mmxPanel.cbxMinMax.SetValue(False) + raise e # Used to be Warn + + def setPD_FFT(self,pd): + """ Convert plot data to FFT data based on GUI options""" + yType = self.spcPanel.cbType.GetStringSelection() + xType = self.spcPanel.cbTypeX.GetStringSelection() + avgMethod = self.spcPanel.cbAveraging.GetStringSelection() + avgWindow = self.spcPanel.cbAveragingMethod.GetStringSelection() + bDetrend = self.spcPanel.cbDetrend.IsChecked() + nExp = self.spcPanel.scP2.GetValue() + # Convert plotdata to FFT data + try: + Info = pd.toFFT(yType=yType, xType=xType, avgMethod=avgMethod, avgWindow=avgWindow, bDetrend=bDetrend, nExp=nExp) + # Trigger + if hasattr(Info,'nExp') and Info.nExp!=nExp: + self.spcPanel.scP2.SetValue(Info.nExp) + self.spcPanel.updateP2(Info.nExp) + except Exception as e: + self.spcPanel.Hide(); + self.plotsizer.Layout() + raise e + + + def transformPlotData(self,PD): + """" + 
Apply MinMax, PDF or FFT transform to plot based on GUI data + """ + plotType=self.pltTypePanel.plotType() + if plotType=='MinMax': + self.setPD_MinMax(PD) + elif plotType=='PDF': + self.setPD_PDF(PD,PD.c) + elif plotType=='FFT': + self.setPD_FFT(PD) + + def getPlotData(self,plotType): + ID,SameCol,selMode=self.selPanel.getPlotDataSelection() + self.selMode=selMode # we store the selection mode + del self.plotData + self.plotData=[] + tabs=self.selPanel.tabList.getTabs() # TODO, selPanel should just return the PlotData... + try: + for i,idx in enumerate(ID): + # Initialize each plotdata based on selected table and selected id channels + pd=PlotData(); + pd.fromIDs(tabs,i,idx,SameCol, self.plotDataOptions) + # Possible change of data + if plotType=='MinMax': + self.setPD_MinMax(pd) + elif plotType=='PDF': + self.setPD_PDF(pd,pd.c) + elif plotType=='FFT': + self.setPD_FFT(pd) + self.plotData.append(pd) + except Exception as e: + self.plotData=[] + raise e + + def PD_Compare(self,mode): + """ Perform comparison of the selected PlotData, returns new plotData with the comparison. """ + sComp = self.cmpPanel.rbType.GetStringSelection() + try: + self.plotData = compareMultiplePD(self.plotData,mode, sComp) + except Exception as e: + self.pltTypePanel.cbRegular.SetValue(True) + raise e + + def _onPlotMatrixLeftClick(self, event): + """Toggle plot-states from None, to left-axis, to right-axis. + Left-click goes forwards, right-click goes backwards. + IndexError to avoid "holes" in matrix with outer adjacent populated entries + """ + btn = event.GetEventObject() + label = btn.GetLabelText() + if label == '-': + btn.SetLabel('1') + try: + self.infoPanel.getPlotMatrix(self.plotData, self.cbSub.IsChecked()) + except IndexError: + btn.SetLabel('-') + elif label == '1': + btn.SetLabel('2') + else: + btn.SetLabel('-') + try: + self.infoPanel.getPlotMatrix(self.plotData, self.cbSub.IsChecked()) + except IndexError: + btn.SetLabel('1') + self.redraw_same_data() + + def _onPlotMatrixRightClick(self, event): + btn = event.GetEventObject() + label = btn.GetLabelText() + if label == '-': + btn.SetLabel('2') + try: + self.infoPanel.getPlotMatrix(self.plotData, self.cbSub.IsChecked()) + except IndexError: + btn.SetLabel('-') + elif label == '1': + btn.SetLabel('-') + try: + self.infoPanel.getPlotMatrix(self.plotData, self.cbSub.IsChecked()) + except IndexError: + btn.SetLabel('2') + else: + btn.SetLabel('1') + self.redraw_same_data() + + def set_axes_lim(self, PDs, axis): + """ + It's usually faster to set the axis limits first (before plotting) + and disable autoscaling. This way the limits are not recomputed when plot data are added. + Also, we already have computed the min and max, so we leverage that. 
+ NOTE: + doesnt not work with strings + doesnt not work for FFT and compare + + INPUTS: + PDs: list of plot data + """ + # TODO option for tight axes + tight=False + + plotType=self.pltTypePanel.plotType() + if plotType in ['FFT','Compare']: + axis.autoscale(True, axis='both', tight=tight) + return + vXString=[PDs[i].xIsString for i in axis.iPD] + vYString=[PDs[i].yIsString for i in axis.iPD] + if not any(vXString) and not self.cbLogX.IsChecked(): + try: + xMin=np.min([PDs[i]._xMin[0] for i in axis.iPD]) + xMax=np.max([PDs[i]._xMax[0] for i in axis.iPD]) + if np.isclose(xMin,xMax): + delta=1 if np.isclose(xMax,0) else 0.1*xMax + else: + if tight: + delta=0 + else: + delta = (xMax-xMin)*pyplot_rc['axes.xmargin'] + axis.set_xlim(xMin-delta,xMax+delta) + axis.autoscale(False, axis='x', tight=False) + except: + pass + if not any(vYString) and not self.cbLogY.IsChecked(): + try: + yMin=np.min([PDs[i]._yMin[0] for i in axis.iPD]) + yMax=np.max([PDs[i]._yMax[0] for i in axis.iPD]) + delta = (yMax-yMin)*pyplot_rc['axes.ymargin'] + if np.isclose(yMin,yMax): + delta=1 if np.isclose(yMax,0) else 0.1*yMax + else: + if tight: + delta=0 + else: + delta = (yMax-yMin)*pyplot_rc['axes.xmargin'] + axis.set_ylim(yMin-delta,yMax+delta) + axis.autoscale(False, axis='y', tight=False) + except: + pass + + def plot_all(self, keep_limits=True): + self.multiCursors=[] + + if self.cbMeasure.GetValue() is False: + for measure in [self.leftMeasure, self.rightMeasure]: + measure.clear() + self.infoPanel.setMeasurements(None, None) + self.lbDeltaX.SetLabel('') + self.lbDeltaY.SetLabel('') + + axes=self.fig.axes + PD=self.plotData + + + # --- Plot options + bStep = self.cbStepPlot.IsChecked() + plot_options = dict() + plot_options['lw']=float(self.esthPanel.cbLW.Value) + plot_options['ms']=float(self.esthPanel.cbMS.Value) + if self.cbCurveType.Value=='Plain': + plot_options['LineStyles'] = ['-'] + plot_options['Markers'] = [''] + elif self.cbCurveType.Value=='LS': + plot_options['LineStyles'] = ['-','--','-.',':'] + plot_options['Markers'] = [''] + elif self.cbCurveType.Value=='Markers': + plot_options['LineStyles'] = [''] + plot_options['Markers'] = ['o','d','v','^','s'] + elif self.cbCurveType.Value=='Mix': # NOTE, can be improved + plot_options['LineStyles'] = ['-','--', '-','-','-'] + plot_options['Markers'] = ['' ,'' ,'o','^','s'] + else: + # Combination of linestyles markers, colors, etc. + # But at that stage, if the user really want this, then we can implement an option to set styles per plot. Not high priority. + raise Exception('Not implemented') + + + + # --- Font options + font_options = dict() + font_options_legd = dict() + font_options['size'] = int(self.esthPanel.cbFont.Value) # affect labels + font_options_legd['fontsize'] = int(self.esthPanel.cbLgdFont.Value) + needChineseFont = any([pd.needChineseFont for pd in PD]) + if needChineseFont and self.specialFont is not None: + font_options['fontproperties']= self.specialFont + font_options_legd['prop'] = self.specialFont + + # --- Loop on axes. 
Either use ax.iPD to chose the plot data, or rely on plotmatrix + for axis_idx, ax_left in enumerate(axes): + ax_right = None + # Checks + vDate=[PD[i].yIsDate for i in ax_left.iPD] + if any(vDate) and len(vDate)>1: + Error(self,'Cannot plot date and other value on the same axis') + return + + # Set limit before plot when possible, for optimization + self.set_axes_lim(PD, ax_left) + + # Actually plot + pm = self.infoPanel.getPlotMatrix(PD, self.cbSub.IsChecked()) + __, bAllNegLeft = self.plotSignals(ax_left, axis_idx, PD, pm, 1, bStep, plot_options) + ax_right, bAllNegRight = self.plotSignals(ax_left, axis_idx, PD, pm, 2, bStep, plot_options) + + self.infoPanel.setMeasurements(self.leftMeasure.get_xydata(), self.rightMeasure.get_xydata()) + for measure in [self.leftMeasure, self.rightMeasure]: + measure.plot(ax_left, axis_idx) + + # Log Axes + if self.cbLogX.IsChecked(): + try: + ax_left.set_xscale("log", nonpositive='clip') # latest + except: + ax_left.set_xscale("log", nonposx='clip') # legacy + + if self.cbLogY.IsChecked(): + if bAllNegLeft is False: + try: + ax_left.set_yscale("log", nonpositive='clip') # latest + except: + ax_left.set_yscale("log", nonposy='clip') + if bAllNegRight is False and ax_right is not None: + try: + ax_right.set_yscale("log", nonpositive='clip') # latest + except: + ax_left.set_yscale("log", nonposy='clip') # legacy + + # XLIM - TODO FFT ONLY NASTY + if self.pltTypePanel.cbFFT.GetValue(): + try: + xlim=float(self.spcPanel.tMaxFreq.GetLineText(0)) + if xlim>0: + ax_left.set_xlim([0,xlim]) + pd=PD[ax_left.iPD[0]] + I=pd.x 0 and len(yleft_labels) <= 3: + ax_left.set_ylabel(' and '.join(yleft_labels), **font_options) + elif ax_left is not None: + ax_left.set_ylabel('') + if len(yright_labels) > 0 and len(yright_labels) <= 3: + ax_right.set_ylabel(' and '.join(yright_labels), **font_options) + elif ax_right is not None: + ax_right.set_ylabel('') + + # Legends + lgdLoc = self.esthPanel.cbLegend.Value.lower() + if (self.pltTypePanel.cbCompare.GetValue() or + ((len(yleft_legends) + len(yright_legends)) > 1)): + if lgdLoc !='none': + if len(yleft_legends) > 0: + ax_left.legend(fancybox=False, loc=lgdLoc, **font_options_legd) + if ax_right is not None and len(yright_legends) > 0: + ax_right.legend(fancybox=False, loc=4, **font_options_legd) + elif len(axes)>1 and len(axes)==len(PD): + # TODO: can this be removed? If there is only one unique signal + # per subplot, normally only ylabel is displayed and no legend. + # Special case when we have subplots and all plots have the same label + if lgdLoc !='none': + usy = unique([pd.sy for pd in PD]) + if len(usy)==1: + for ax in axes: + ax.legend(fancybox=False, loc=lgdLoc, **font_options_legd) + + axes[-1].set_xlabel(PD[axes[-1].iPD[0]].sx, **font_options) + + #print('sy :',[pd.sy for pd in PD]) + #print('syl:',[pd.syl for pd in PD]) + + # --- Cursors for each individual plot + # NOTE: cursors needs to be stored in the object! 
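The NOTE above about storing cursors matters in practice: matplotlib widgets stay responsive only while a reference to them is kept alive, which is why the multi-cursor is assigned to `self.multiCursors` just below. A minimal, standalone sketch of the same idea (illustrative only, not part of this patch):

    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.widgets import MultiCursor

    fig, (ax1, ax2) = plt.subplots(2, sharex=True)
    t = np.linspace(0, 1, 200)
    ax1.plot(t, np.sin(2*np.pi*t))
    ax2.plot(t, np.cos(2*np.pi*t))
    # Keep a reference to the cursor: a local that goes out of scope may be
    # garbage collected, and the cursor then stops following the mouse.
    cursor = MultiCursor(fig.canvas, (ax1, ax2), useblit=True,
                         horizOn=False, vertOn=True,
                         color='gray', lw=0.5, ls=':')
    plt.show()
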
+ #for ax_left in self.fig.axes: + # self.cursors.append(MyCursor(ax_left,horizOn=True, vertOn=False, useblit=True, color='gray', linewidth=0.5, linestyle=':')) + # Vertical cusor for all, commonly + bXHair = self.cbXHair.GetValue() + self.multiCursors = MyMultiCursor(self.canvas, tuple(self.fig.axes), useblit=True, horizOn=bXHair, vertOn=bXHair, color='gray', linewidth=0.5, linestyle=':') + + def plotSignals(self, ax, axis_idx, PD, pm, left_right, is_step, opts): + axis = None + bAllNeg = True + if pm is None: + loop_range = ax.iPD + else: + loop_range = range(len(PD)) + + iPlot=-1 + for signal_idx in loop_range: + do_plot = False + if left_right == 1 and (pm is None or pm[signal_idx][axis_idx] == left_right): + do_plot = True + axis = ax + elif left_right == 2 and pm is not None and pm[signal_idx][axis_idx] == left_right: + do_plot = True + if axis is None: + axis = ax.twinx() + ax.set_zorder(axis.get_zorder()+1) + ax.patch.set_visible(False) + axis._get_lines.prop_cycler = ax._get_lines.prop_cycler + pd=PD[signal_idx] + if do_plot: + iPlot+=1 + # --- styling per plot + if len(pd.x)==1: + marker='o'; ls='' + else: + # TODO allow PlotData to override for "per plot" options in the future + marker = opts['Markers'][np.mod(iPlot,len(opts['Markers']))] + ls = opts['LineStyles'][np.mod(iPlot,len(opts['LineStyles']))] + if is_step: + plot = axis.step + else: + plot = axis.plot + plot(pd.x,pd.y,label=pd.syl,ms=opts['ms'], lw=opts['lw'], marker=marker, ls=ls) + try: + bAllNeg = bAllNeg and all(pd.y<=0) + except: + pass # Dates or strings + return axis, bAllNeg + + def findPlotMode(self,PD): + uTabs = unique([pd.it for pd in PD]) + usy = unique([pd.sy for pd in PD]) + uiy = unique([pd.iy for pd in PD]) + if len(uTabs)<=0: + raise Exception('No Table. Contact developer') + if len(uTabs)==1: + mode='1Tab_nCols' + else: + if PD[0].SameCol: + mode='nTabs_SameCols' + else: + # Now that we allow multiple selections detecting "simColumns" is more difficult + if len(uTabs) == len(PD): + mode='nTabs_1Col' + elif self.selMode=='simColumnsMode': + mode='nTabs_SimCols' + else: + mode='nTabs_mCols' + return mode + + def findSubPlots(self,PD,mode): + uTabs = unique([pd.it for pd in PD]) + usy = unique([pd.sy for pd in PD]) + bSubPlots = self.cbSub.IsChecked() + bCompare = self.pltTypePanel.cbCompare.GetValue() # NOTE bCompare somehow always 1Tab_nCols + nSubPlots=1 + spreadBy='none' + self.infoPanel.setTabMode(mode) + if mode=='1Tab_nCols': + if bSubPlots: + if bCompare or len(uTabs)==1: + nSubPlots = self.infoPanel.getNumberOfSubplots(PD, bSubPlots) + else: + nSubPlots=len(usy) + spreadBy='iy' + elif mode=='nTabs_SameCols': + if bSubPlots: + if bCompare: + print('>>>TODO ',mode,len(usy),len(uTabs)) + else: + if len(usy)==1: + # Temporary hack until we have an option for spread by tabs or col + nSubPlots=len(uTabs) + spreadBy='it' + else: + nSubPlots=len(usy) + spreadBy='iy' + elif mode=='nTabs_SimCols': + if bSubPlots: + if bCompare: + print('>>>TODO ',mode,len(usy),len(uTabs)) + else: + nSubPlots=int(len(PD)/len(uTabs)) + spreadBy='mod-ip' + elif mode=='nTabs_mCols': + if bSubPlots: + if bCompare: + print('>>>TODO ',mode,len(usy),len(uTabs)) + else: + if bCompare or len(uTabs)==1: + nSubPlots = self.infoPanel.getNumberOfSubplots(PD, bSubPlots) + else: + nSubPlots=len(PD) + spreadBy='mod-ip' + elif mode=='nTabs_1Col': + if bSubPlots: + if bCompare: + print('>>> TODO',mode,len(uTabs)) + else: + nSubPlots=len(uTabs) + spreadBy='it' + else: + raise Exception('Unknown mode, contact developer.') + return 
nSubPlots,spreadBy + + def distributePlots(self,mode,nSubPlots,spreadBy): + """ Assigns plot data to axes and axes to plot data """ + axes=self.fig.axes + + # Link plot data to axes + if nSubPlots==1 or spreadBy=='none': + axes[0].iPD=[i for i in range(len(self.plotData))] + else: + for ax in axes: + ax.iPD=[] + PD=self.plotData + uTabs=unique([pd.it for pd in PD]) + uiy=unique([pd.iy for pd in PD]) + if spreadBy=='iy': + for ipd,pd in enumerate(PD): + i=uiy.index(pd.iy) + if i < len(axes): + axes[i].iPD.append(ipd) + elif spreadBy=='it': + for ipd,pd in enumerate(PD): + i=uTabs.index(pd.it) + axes[i].iPD.append(ipd) + elif spreadBy=='mod-ip': + for ipd,pd in enumerate(PD): + i=np.mod(ipd, nSubPlots) + axes[i].iPD.append(ipd) + else: + raise Exception('Wrong spreadby value') + + def setLegendLabels(self,mode): + """ Set labels for legend """ + if mode=='1Tab_nCols': + for pd in self.plotData: + if self.pltTypePanel.cbMinMax.GetValue(): + pd.syl = no_unit(pd.sy) + else: + pd.syl = pd.sy + + elif mode=='nTabs_SameCols': + for pd in self.plotData: + pd.syl=pd.st + + elif mode=='nTabs_1Col': + usy=unique([pd.sy for pd in self.plotData]) + if len(usy)==1: + for pd in self.plotData: + pd.syl=pd.st + else: + for pd in self.plotData: + if self.pltTypePanel.cbMinMax.GetValue(): + pd.syl=no_unit(pd.sy) + else: + pd.syl=pd.sy #pd.syl=pd.st + ' - '+pd.sy + elif mode=='nTabs_SimCols': + bSubPlots = self.cbSub.IsChecked() + if bSubPlots: # spread by table name + for pd in self.plotData: + pd.syl=pd.st + else: + for pd in self.plotData: + pd.syl=pd.st + ' - '+pd.sy + elif mode=='nTabs_mCols': + usy=unique([pd.sy for pd in self.plotData]) + bSubPlots = self.cbSub.IsChecked() + if bSubPlots and len(usy)==1: # spread by table name + for pd in self.plotData: + pd.syl=pd.st + else: + for pd in self.plotData: + pd.syl=pd.st + ' - '+pd.sy + else: + raise Exception('Unknown mode {}'.format(mode)) + + + def empty(self): + self.cleanPlot() + + def clean_memory(self): + if hasattr(self,'plotData'): + del self.plotData + self.plotData=[] + for ax in self.fig.axes: + ax.iPD=[] + self.fig.delaxes(ax) + gc.collect() + + def clean_memory_plot(self): + pass + + def cleanPlot(self): + for ax in self.fig.axes: + if hasattr(ax,'iPD'): + del ax.iPD + self.fig.delaxes(ax) + gc.collect() + self.fig.add_subplot(111) + ax = self.fig.axes[0] + ax.set_axis_off() + #ax.plot(1,1) + self.canvas.draw() + gc.collect() + + def load_and_draw(self): + """ Full draw event: + - Get plot data based on selection + - Plot them + - Trigger changes to infoPanel + + """ + self.clean_memory() + self.getPlotData(self.pltTypePanel.plotType()) + if len(self.plotData)==0: + self.cleanPlot(); + return + mode=self.findPlotMode(self.plotData) + if self.pltTypePanel.cbCompare.GetValue(): + self.PD_Compare(mode) + if len(self.plotData)==0: + self.cleanPlot(); + return + self.redraw_same_data() + if self.infoPanel is not None: + self.infoPanel.showStats(self.plotData,self.pltTypePanel.plotType()) + + def redraw_same_data(self, keep_limits=True): + if len(self.plotData)==0: + self.cleanPlot(); + return + elif len(self.plotData) == 1: + if self.plotData[0].xIsString or self.plotData[0].yIsString or self.plotData[0].xIsDate or self.plotData[0].yIsDate: + self.cbAutoScale.SetValue(True) + else: + if len(self.xlim_prev)==0: # Might occur if some date didn't plot before (e.g. 
strings) + self.cbAutoScale.SetValue(True) + elif rectangleOverlap(self.plotData[0]._xMin[0], self.plotData[0]._yMin[0], + self.plotData[0]._xMax[0], self.plotData[0]._yMax[0], + self.xlim_prev[0][0], self.ylim_prev[0][0], + self.xlim_prev[0][1], self.ylim_prev[0][1]): + pass + else: + self.cbAutoScale.SetValue(True) + + mode=self.findPlotMode(self.plotData) + nPlots,spreadBy=self.findSubPlots(self.plotData,mode) + + self.clean_memory_plot() + self.set_subplots(nPlots) + self.distributePlots(mode,nPlots,spreadBy) + + if not self.pltTypePanel.cbCompare.GetValue(): + self.setLegendLabels(mode) + + self.plot_all(keep_limits) + self.canvas.draw() + + + def _store_limits(self): + self.xlim_prev = [] + self.ylim_prev = [] + for ax in self.fig.axes: + self.xlim_prev.append(ax.get_xlim()) + self.ylim_prev.append(ax.get_ylim()) + + def _restore_limits(self): + for ax, xlim, ylim in zip(self.fig.axes, self.xlim_prev, self.ylim_prev): + ax.set_xlim(xlim) + ax.set_ylim(ylim) + + +if __name__ == '__main__': + import pandas as pd; + from Tables import Table,TableList + + app = wx.App(False) + self=wx.Frame(None,-1,"Title") + self.SetSize((800, 600)) + #self.SetBackgroundColour('red') + class FakeSelPanel(wx.Panel): + def __init__(self, parent): + super(FakeSelPanel,self).__init__(parent) + d ={'ColA': np.linspace(0,1,100)+1,'ColB': np.random.normal(0,1,100)+0,'ColC':np.random.normal(0,1,100)+1} + df = pd.DataFrame(data=d) + self.tabList=TableList([Table(data=df)]) + + def getPlotDataSelection(self): + ID=[] + ID.append([0,0,2,'x','ColB','tab']) + ID.append([0,0,3,'x','ColC','tab']) + return ID,True + + selpanel=FakeSelPanel(self) + # selpanel.SetBackgroundColour('blue') + p1=PlotPanel(self,selpanel) + p1.load_and_draw() + #p1=SpectralCtrlPanel(self) + sizer = wx.BoxSizer(wx.VERTICAL) + sizer.Add(selpanel,0, flag = wx.EXPAND|wx.ALL,border = 10) + sizer.Add(p1,1, flag = wx.EXPAND|wx.ALL,border = 10) + self.SetSizer(sizer) + + self.Center() + self.Layout() + self.SetSize((800, 600)) + self.Show() + self.SendSizeEvent() + + #p1.showStats(None,[tab],[0],[0,1],tab.columns,0,erase=False) + + app.MainLoop() + + diff --git a/pydatview/main.py b/pydatview/main.py index eb71e5b..2f60547 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -145,8 +145,8 @@ def __init__(self, filename=None): self.Bind(wx.EVT_MENU, lambda e: self.onShowTool(e,'FASTRadialAverage'), dataMenu.Append(wx.ID_ANY, 'FAST - Radial average')) # --- Data Plugins - for string, function in dataPlugins: - self.Bind(wx.EVT_MENU, lambda e, s_loc=string: function(self, e, s_loc), dataMenu.Append(wx.ID_ANY, string)) + for string, function, isPanel in dataPlugins: + self.Bind(wx.EVT_MENU, lambda e, s_loc=string: self.onDataPlugin(e, s_loc), dataMenu.Append(wx.ID_ANY, string)) toolMenu = wx.Menu() menuBar.Append(toolMenu, "&Tools") @@ -364,6 +364,7 @@ def load_tabs_into_GUI(self, bReload=False, bAdd=False, bPlot=True): pass # Hack #self.onShowTool(tool='Resample') + #self.onDataPlugin(toolName='Bin data') def setStatusBar(self, ISel=None): nTabs=self.tabList.len() @@ -374,11 +375,11 @@ def setStatusBar(self, ISel=None): self.statusbar.SetStatusText('', 1) # Filenames self.statusbar.SetStatusText('', 2) # Shape elif nTabs==1: - self.statusbar.SetStatusText(self.tabList.get(0).fileformat.name, 0) + self.statusbar.SetStatusText(self.tabList.get(0).fileformat_name, 0) self.statusbar.SetStatusText(self.tabList.get(0).filename , 1) self.statusbar.SetStatusText(self.tabList.get(0).shapestring, 2) elif len(ISel)==1: - 
self.statusbar.SetStatusText(self.tabList.get(ISel[0]).fileformat.name , 0) + self.statusbar.SetStatusText(self.tabList.get(ISel[0]).fileformat_name , 0) self.statusbar.SetStatusText(self.tabList.get(ISel[0]).filename , 1) self.statusbar.SetStatusText(self.tabList.get(ISel[0]).shapestring, 2) else: @@ -433,6 +434,28 @@ def onShowTool(self, event=None, tool=''): return self.plotPanel.showTool(tool) + def onDataPlugin(self, event=None, toolName=''): + """ + Dispatcher to apply plugins to data: + - simple plugins are directly exectued + - plugins that are panels are sent over to plotPanel to show them + TODO merge with onShowTool + """ + if not hasattr(self,'plotPanel'): + Error(self,'Plot some data first') + return + + for thisToolName, function, isPanel in dataPlugins: + if toolName == thisToolName: + if isPanel: + panelClass = function(self, event, toolName) # getting panelClass + self.plotPanel.showToolPanel(panelClass) + else: + function(self, event, toolName) # calling the data function + return + raise NotImplementedError('Tool: ',toolName) + + def onSashChangeMain(self,event=None): pass # doent work because size is not communicated yet diff --git a/pydatview/plotdata.py b/pydatview/plotdata.py index 09073c9..17cb2f2 100644 --- a/pydatview/plotdata.py +++ b/pydatview/plotdata.py @@ -1,802 +1,807 @@ -from __future__ import absolute_import -import os -import numpy as np -from .common import no_unit, unit, inverse_unit, has_chinese_char -from .common import isString, isDate, getDt -from .common import unique, pretty_num, pretty_time -from .GUIMeasure import find_closest # Should not depend on wx - -class PlotData(): - """ - Class for plot data - - For now, relies on some "indices" related to Tables/Columns/ and maybe Selection panel - Not really elegant. 
These dependencies should be removed in the future - """ - def __init__(PD, x=None, y=None, sx='', sy=''): - """ Dummy init for now """ - PD.id=-1 - PD.it=-1 # tablx index - PD.ix=-1 # column index - PD.iy=-1 # column index - PD.sx='' # x label - PD.sy='' # y label - PD.st='' # table label - PD.syl='' # y label for legend - PD.filename = '' - PD.tabname = '' - PD.x =[] # x data - PD.y =[] # y data - PD.xIsString=False # true if strings - PD.xIsDate =False # true if dates - PD.yIsString=False # true if strings - PD.yIsDate =False # true if dates - - if x is not None and y is not None: - PD.fromXY(x,y,sx,sy) - - def fromIDs(PD, tabs, i, idx, SameCol, Options={}): - """ Nasty initialization of plot data from "IDs" """ - PD.id = i - PD.it = idx[0] # table index - PD.ix = idx[1] # x index - PD.iy = idx[2] # y index - PD.sx = idx[3] # x label - PD.sy = idx[4] # y label - PD.syl = '' # y label for legend - PD.st = idx[5] # table label - PD.filename = tabs[PD.it].filename - PD.tabname = tabs[PD.it].active_name - PD.SameCol = SameCol - PD.x, PD.xIsString, PD.xIsDate,_ = tabs[PD.it].getColumn(PD.ix) # actual x data, with info - PD.y, PD.yIsString, PD.yIsDate,c = tabs[PD.it].getColumn(PD.iy) # actual y data, with info - PD.c =c # raw values, used by PDF - - PD._post_init(Options=Options) - - def fromXY(PD, x, y, sx='', sy=''): - PD.x = x - PD.y = y - PD.c = y - PD.sx = sx - PD.sy = sy - PD.xIsString = isString(x) - PD.yIsString = isString(y) - PD.xIsDate = isDate (x) - PD.yIsDate = isDate (y) - - PD._post_init() - - - def _post_init(PD, Options={}): - # --- Perform data manipulation on the fly - #print(Options) - keys=Options.keys() - if 'RemoveOutliers' in keys: - if Options['RemoveOutliers']: - from pydatview.tools.signal import reject_outliers - try: - PD.x, PD.y = reject_outliers(PD.y, PD.x, m=Options['OutliersMedianDeviation']) - except: - raise Exception('Warn: Outlier removal failed. Desactivate it or use a different signal. ') - if 'Filter' in keys: - if Options['Filter']: - from pydatview.tools.signal import applyFilter - PD.y = applyFilter(PD.x, PD.y, Options['Filter']) - - if 'Sampler' in keys: - if Options['Sampler']: - from pydatview.tools.signal import applySampler - PD.x, PD.y = applySampler(PD.x, PD.y, Options['Sampler']) - - # --- Store stats - n=len(PD.y) - if n>1000: - if (PD.xIsString): - raise Exception('Error: x values contain more than 1000 string. This is not suitable for plotting.\n\nPlease select another column for table: {}\nProblematic column: {}\n'.format(PD.st,PD.sx)) - if (PD.yIsString): - raise Exception('Error: y values contain more than 1000 string. This is not suitable for plotting.\n\nPlease select another column for table: {}\nProblematic column: {}\n'.format(PD.st,PD.sy)) - - PD.needChineseFont = has_chinese_char(PD.sy) or has_chinese_char(PD.sx) - # Stats of the raw data (computed once and for all, since it can be expensive for large dataset - PD.computeRange() - # Store the values of the original data (labelled "0"), since the data might be modified later by PDF or MinMax etc. 
- PD._y0Min = PD._yMin - PD._y0Max = PD._yMax - PD._x0Min = PD._xMin - PD._x0Max = PD._xMax - PD._x0AtYMin = PD._xAtYMin - PD._x0AtYMax = PD._xAtYMax - PD._y0Std = PD.yStd() - PD._y0Mean = PD.yMean() - PD._n0 = (n,'{:d}'.format(n)) - PD.x0 =PD.x - PD.y0 =PD.y - # Store xyMeas input values so we don't need to recompute xyMeas in case they didn't change - PD.xyMeasInput1, PD.xyMeasInput2 = None, None - PD.xyMeas1, PD.xyMeas2 = None, None - - def __repr__(s): - s1='id:{}, it:{}, ix:{}, iy:{}, sx:"{}", sy:"{}", st:{}, syl:{}\n'.format(s.id,s.it,s.ix,s.iy,s.sx,s.sy,s.st,s.syl) - return s1 - - def toPDF(PD, nBins=30, smooth=False): - """ Convert y-data to Probability density function (PDF) as function of x - Uses "stats" library (from welib/pybra) - NOTE: inPlace - """ - from pydatview.tools.stats import pdf_gaussian_kde, pdf_histogram - - n=len(PD.y) - if PD.yIsString: - if n>100: - raise Exception('Warn: Dataset has string format and is too large to display') - vc = PD.c.value_counts().sort_index() - PD.x = vc.keys().tolist() - PD.y = vc/n # TODO counts/PDF option - PD.yIsString=False - PD.xIsString=True - elif PD.yIsDate: - raise Exception('Warn: Cannot plot PDF of dates') - else: - if nBins>=n: - nBins=n - if smooth: - try: - PD.x, PD.y = pdf_gaussian_kde(PD.y, nOut=nBins) - except np.linalg.LinAlgError as e: - PD.x, PD.y = pdf_histogram(PD.y, nBins=nBins, norm=True, count=False) - else: - PD.x, PD.y = pdf_histogram(PD.y, nBins=nBins, norm=True, count=False) - PD.xIsString=False - PD.yIsString=False - - PD.sx = PD.sy; - PD.sy = 'PDF('+no_unit(PD.sy)+')' - iu = inverse_unit(PD.sy) - if len(iu)>0: - PD.sy += ' ['+ iu +']' - - # Compute min max once and for all - PD.computeRange() - - return nBins - - - def toMinMax(PD, xScale=False, yScale=True): - """ Convert plot data to MinMax data based on GUI options - NOTE: inPlace - """ - if yScale: - if PD.yIsString: - raise Exception('Warn: Cannot compute min-max for strings') - mi = PD._y0Min[0] #mi= np.nanmin(PD.y) - mx = PD._y0Max[0] #mx= np.nanmax(PD.y) - if mi == mx: - PD.y=PD.y*0 - else: - PD.y = (PD.y-mi)/(mx-mi) - PD._yMin=0,'0' - PD._yMax=1,'1' - if xScale: - if PD.xIsString: - raise Exception('Warn: Cannot compute min-max for strings') - mi= PD._x0Min[0] - mx= PD._x0Max[0] - if mi == mx: - PD.x=PD.x*0 - else: - PD.x = (PD.x-mi)/(mx-mi) - PD._xMin=0,'0' - PD._xMax=1,'1' - - # Compute min max once and for all - #PD.computeRange() - - return None - - - def toFFT(PD, yType='Amplitude', xType='1/x', avgMethod='Welch', avgWindow='Hamming', bDetrend=True, nExp=8): - """ - Uses spectral.fft_wrap to generate a "FFT" plot data, with various options: - yType : amplitude, PSD, f x PSD - xType : 1/x, x, 2pi/x - avgMethod : None, Welch - avgWindow : Hamming, Hann, Rectangular - see module spectral for more - - NOTE: inplace (modifies itself), does not return a new instance - """ - from pydatview.tools.spectral import fft_wrap - - # --- TODO, make this independent of GUI - if PD.yIsString or PD.yIsDate: - raise Exception('Warn: Cannot plot FFT of dates or strings') - elif PD.xIsString: - raise Exception('Warn: Cannot plot FFT if x axis is string') - - dt=None - if PD.xIsDate: - dt = getDt(PD.x) - # --- Computing fft - x is freq, y is Amplitude - PD.x, PD.y, Info = fft_wrap(PD.x, PD.y, dt=dt, output_type=yType,averaging=avgMethod, averaging_window=avgWindow,detrend=bDetrend,nExp=nExp) - # --- Setting plot options - PD._Info=Info - PD.xIsDate=False - # y label - if yType=='PSD': - PD.sy= 'PSD({}) [({})^2/{}]'.format(no_unit(PD.sy), unit(PD.sy), unit(PD.sx)) 
- elif yType=='f x PSD': - PD.sy= 'f-weighted PSD({}) [({})^2]'.format(no_unit(PD.sy), unit(PD.sy)) - elif yType=='Amplitude': - PD.sy= 'FFT({}) [{}]'.format(no_unit(PD.sy), unit(PD.sy)) - else: - raise Exception('Unsupported FFT type {} '.format(yType)) - # x label - if xType=='1/x': - if unit(PD.sx)=='s': - PD.sx= 'Frequency [Hz]' - else: - PD.sx= '' - elif xType=='x': - PD.x=1/PD.x - if unit(PD.sx)=='s': - PD.sx= 'Period [s]' - else: - PD.sx= '' - elif xType=='2pi/x': - PD.x=2*np.pi*PD.x - if unit(PD.sx)=='s': - PD.sx= 'Cyclic frequency [rad/s]' - else: - PD.sx= '' - else: - raise Exception('Unsupported x-type {} '.format(xType)) - - PD.computeRange() - return Info - - def computeRange(PD): - """ Compute min max of data once and for all and store - From the performance tests, this ends up having a non negligible cost for large dataset, - so we store it to reuse these as much as possible. - If possible, should be used for the plotting as well, so that matplotlib don't - have to compute them again - NOTE: each variable is a tuple (v,s), with a float and its string representation - """ - PD._xMin = PD._xMinCalc() - PD._xMax = PD._xMaxCalc() - PD._yMin = PD._yMinCalc() - PD._yMax = PD._yMaxCalc() - PD._xAtYMin = PD._xAtYMinCalc(PD._yMin[0]) - PD._xAtYMax = PD._xAtYMaxCalc(PD._yMax[0]) - - - # --------------------------------------------------------------------------------} - # --- Stats functions that should only becalled once, could maybe use @attributes.. - # --------------------------------------------------------------------------------{ - def _yMinCalc(PD): - if PD.yIsString: - return PD.y[0],PD.y[0].strip() - elif PD.yIsDate: - return PD.y[0],'{}'.format(PD.y[0]) - else: - v=np.nanmin(PD.y) - s=pretty_num(v) - return (v,s) - - def _yMaxCalc(PD): - if PD.yIsString: - return PD.y[-1],PD.y[-1].strip() - elif PD.yIsDate: - return PD.y[-1],'{}'.format(PD.y[-1]) - else: - v=np.nanmax(PD.y) - s=pretty_num(v) - return (v,s) - - def _xAtYMinCalc(PD, yMin): - if PD.xIsString: - return PD.x[0],PD.x[0].strip() - elif PD.xIsDate: - return PD.x[0],'{}'.format(PD.x[0]) - else: - try: - v = PD.x[np.where(PD.y == yMin)[0][0]] # Might fail if all nan - except: - v = PD.x[0] - s=pretty_num(v) - return (v,s) - - def _xAtYMaxCalc(PD, yMax): - if PD.xIsString: - return PD.x[-1],PD.x[-1].strip() - elif PD.xIsDate: - return PD.x[-1],'{}'.format(PD.x[-1]) - else: - try: - v = PD.x[np.where(PD.y == yMax)[0][0]] # Might fail if all nan - except: - v = PD.x[0] - s=pretty_num(v) - return (v,s) - - def _xMinCalc(PD): - if PD.xIsString: - return PD.x[0],PD.x[0].strip() - elif PD.xIsDate: - return PD.x[0],'{}'.format(PD.x[0]) - else: - v=np.nanmin(PD.x) - s=pretty_num(v) - return (v,s) - - def _xMaxCalc(PD): - if PD.xIsString: - return PD.x[-1],PD.x[-1].strip() - elif PD.xIsDate: - return PD.x[-1],'{}'.format(PD.x[-1]) - else: - v=np.nanmax(PD.x) - s=pretty_num(v) - return (v,s) - - def xMin(PD): - return PD._xMin - - def xMax(PD): - return PD._xMax - - def xAtYMin(PD): - return PD._xAtYMin - - def xAtYMax(PD): - return PD._xAtYMax - - def yMin(PD): - return PD._yMin - - def yMax(PD): - return PD._yMax - - def y0Min(PD): - return PD._y0Min - - def y0Max(PD): - return PD._y0Max - - def y0Mean(PD): - return PD._y0Mean - - def y0Std(PD): - return PD._y0Std - - def n0(PD): - return PD._n0 - - # --------------------------------------------------------------------------------} - # --- Stats functions - # --------------------------------------------------------------------------------{ - def yMean(PD): - if PD.yIsString or 
PD.yIsDate: - return None,'NA' - else: - v=np.nanmean(PD.y) - s=pretty_num(v) - return (v,s) - - def yMedian(PD): - if PD.yIsString or PD.yIsDate: - return None,'NA' - else: - v=np.nanmedian(PD.y) - s=pretty_num(v) - return (v,s) - - def yStd(PD): - if PD.yIsString or PD.yIsDate: - return None,'NA' - else: - v=np.nanstd(PD.y) - s=pretty_num(v) - return (v,s) - - def yName(PD): - return PD.sy, PD.sy - - def fileName(PD): - return os.path.basename(PD.filename), os.path.basename(PD.filename) - - def baseDir(PD): - return os.path.dirname(PD.filename),os.path.join(os.path.dirname(PD.filename),'') - - def tabName(PD): - return PD.tabname, PD.tabname - - def ylen(PD): - v=len(PD.y) - s='{:d}'.format(v) - return v,s - - - def y0Var(PD): - if PD._y0Std[0] is not None: - v=PD._y0Std[0]**2 - s=pretty_num(v) - else: - v=None - s='NA' - return v,s - - def y0TI(PD): - v=PD._y0Std[0]/PD._y0Mean[0] - s=pretty_num(v) - return v,s - - - def yRange(PD): - if PD.yIsString: - return 'NA','NA' - elif PD.yIsDate: - dtAll=getDt([PD.x[-1]-PD.x[0]]) - return '',pretty_time(dtAll) - else: - v=np.nanmax(PD.y)-np.nanmin(PD.y) - s=pretty_num(v) - return v,s - - def yAbsMax(PD): - if PD.yIsString or PD.yIsDate: - return 'NA','NA' - else: - v=max(np.abs(PD._y0Min[0]),np.abs(PD._y0Max[0])) - s=pretty_num(v) - return v,s - - - def xRange(PD): - if PD.xIsString: - return 'NA','NA' - elif PD.xIsDate: - dtAll=getDt([PD.x[-1]-PD.x[0]]) - return '',pretty_time(dtAll) - else: - v=np.nanmax(PD.x)-np.nanmin(PD.x) - s=pretty_num(v) - return v,s - - - def inty(PD): - if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: - return None,'NA' - else: - v=np.trapz(y=PD.y,x=PD.x) - s=pretty_num(v) - return v,s - - def intyintdx(PD): - if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: - return None,'NA' - else: - v=np.trapz(y=PD.y,x=PD.x)/np.trapz(y=PD.x*0+1,x=PD.x) - s=pretty_num(v) - return v,s - - def intyx1(PD): - if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: - return None,'NA' - else: - v=np.trapz(y=PD.y*PD.x,x=PD.x) - s=pretty_num(v) - return v,s - - def intyx1_scaled(PD): - if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: - return None,'NA' - else: - v=np.trapz(y=PD.y*PD.x,x=PD.x) - v=v/np.trapz(y=PD.y,x=PD.x) - s=pretty_num(v) - return v,s - - def intyx2(PD): - if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: - return None,'NA' - else: - v=np.trapz(y=PD.y*PD.x**2,x=PD.x) - s=pretty_num(v) - return v,s - - def meas1(PD, xymeas1, xymeas2): - if PD.xyMeasInput1 is not None and PD.xyMeasInput1 == xymeas1: - yv = PD.xyMeas1[1] - s = pretty_num(yv) - else: - xv, yv, s = PD._meas(xymeas1) - PD.xyMeas1 = [xv, yv] - PD.xyMeasInput1 = xymeas1 - return yv, s - - def meas2(PD, xymeas1, xymeas2): - if PD.xyMeasInput2 is not None and PD.xyMeasInput2 == xymeas2: - yv = PD.xyMeas2[1] - s = pretty_num(yv) - else: - xv, yv, s = PD._meas(xymeas2) - PD.xyMeas2 = [xv, yv] - PD.xyMeasInput2 = xymeas2 - return yv, s - - def yMeanMeas(PD): - return PD._measCalc('mean') - - def yMinMeas(PD): - return PD._measCalc('min') - - def yMaxMeas(PD): - return PD._measCalc('max') - - def xAtYMinMeas(PD): - return PD._measCalc('xmin') - - def xAtYMaxMeas(PD): - return PD._measCalc('xmax') - - def _meas(PD, xymeas): - try: - xv, yv = 'NA', 'NA' - xy = np.array([PD.x, PD.y]).transpose() - points = find_closest(xy, [xymeas[0], xymeas[1]], False) - if points.ndim == 1: - xv, yv = points[0:2] - s = pretty_num(yv) - else: - xv, yv = points[0, 0], points[0, 1] - s = ' / '.join([str(p) for p in points[:, 1]]) - except 
(IndexError, TypeError): - xv, yv = 'NA', 'NA' - s='NA' - return xv, yv, s - - def _measCalc(PD, mode): - if PD.xyMeas1 is None or PD.xyMeas2 is None: - return 'NA', 'NA' - try: - v = 'NA' - left_index = np.where(PD.x == PD.xyMeas1[0])[0][0] - right_index = np.where(PD.x == PD.xyMeas2[0])[0][0] - if left_index == right_index: - raise IndexError - if left_index > right_index: - left_index, right_index = right_index, left_index - if mode == 'mean': - v = np.nanmean(PD.y[left_index:right_index]) - elif mode == 'min': - v = np.nanmin(PD.y[left_index:right_index]) - elif mode == 'max': - v = np.nanmax(PD.y[left_index:right_index]) - elif mode == 'xmin': - v = PD.x[left_index + np.where(PD.y[left_index:right_index] == np.nanmin(PD.y[left_index:right_index]))[0][0]] - elif mode == 'xmax': - v = PD.x[left_index + np.where(PD.y[left_index:right_index] == np.nanmax(PD.y[left_index:right_index]))[0][0]] - else: - raise NotImplementedError('Error: Mode ' + mode + ' not implemented') - s = pretty_num(v) - except (IndexError, TypeError): - v = 'NA' - s = 'NA' - return v, s - - def dx(PD): - if len(PD.x)<=1: - return 'NA','NA' - if PD.xIsString: - return None,'NA' - elif PD.xIsDate: - dt=getDt(PD.x) - return dt,pretty_time(dt) - else: - v=PD.x[1]-PD.x[0] - s=pretty_num(v) - return v,s - - def xMax(PD): - if PD.xIsString: - return PD.x[-1],PD.x[-1] - elif PD.xIsDate: - return PD.x[-1],'{}'.format(PD.x[-1]) - else: - v=np.nanmax(PD.x) - s=pretty_num(v) - return v,s - def xMin(PD): - if PD.xIsString: - return PD.x[0],PD.x[0] - elif PD.xIsDate: - return PD.x[0],'{}'.format(PD.x[0]) - else: - v=np.nanmin(PD.x) - s=pretty_num(v) - return v,s - - def leq(PD,m): - from pydatview.tools.fatigue import eq_load - if PD.yIsString or PD.yIsDate: - return 'NA','NA' - else: - T,_=PD.xRange() - v=eq_load(PD.y, m=m, neq=T)[0][0] - return v,pretty_num(v) - - def Info(PD,var): - if var=='LSeg': - return '','{:d}'.format(PD._Info.LSeg) - elif var=='LWin': - return '','{:d}'.format(PD._Info.LWin) - elif var=='LOvlp': - return '','{:d}'.format(PD._Info.LOvlp) - elif var=='nFFT': - return '','{:d}'.format(PD._Info.nFFT) - - -# --------------------------------------------------------------------------------} -# --- -# --------------------------------------------------------------------------------{ -def compareMultiplePD(PD, mode, sComp): - """ - PD: list of PlotData - sComp: string in ['Relative', '|Relative|', 'Ratio', 'Absolute' - mode: plot mode, nTabs_1Col, nTabs_SameCols, nTabs_SimCols - - return: - PD_comp : new PlotData list that compares the input list PD - - """ - # --- Helper function - def getError(y,yref,method): - if len(y)!=len(yref): - raise NotImplementedError('Cannot compare signals of different lengths') - if sComp=='Relative': - if np.mean(np.abs(yref))<1e-7: - Error=(y-yRef)/(yRef+1)*100 - else: - Error=(y-yRef)/yRef*100 - elif sComp=='|Relative|': - if np.mean(np.abs(yref))<1e-7: - Error=abs((y-yRef)/(yRef+1))*100 - else: - Error=abs((y-yRef)/yRef)*100 - elif sComp=='Ratio': - if np.mean(np.abs(yref))<1e-7: - Error=(y+1)/(yRef+1) - else: - Error=y/yRef - elif sComp=='Absolute': - Error=y-yRef - else: - raise Exception('Something wrong '+sComp) - return Error - - def getErrorLabel(ylab=''): - if len(ylab)>0: - ylab=no_unit(ylab) - ylab='in '+ylab+' ' - if sComp=='Relative': - return 'Relative error '+ylab+'[%]'; - elif sComp=='|Relative|': - return 'Abs. 
relative error '+ylab+'[%]'; - if sComp=='Ratio': - return 'Ratio '+ylab.replace('in','of')+'[-]'; - elif sComp=='Absolute': - usy = unique([pd.sy for pd in PD]) - yunits= unique([unit(sy) for sy in usy]) - if len(yunits)==1 and len(yunits[0])>0: - return 'Absolute error '+ylab+'['+yunits[0]+']' - else: - return 'Absolute error '+ylab; - elif sComp=='Y-Y': - return PD[0].sy - - xlabelAll=PD[0].sx - - - if any([pd.yIsString for pd in PD]): - raise Exception('Warn: Cannot compare strings') - if any([pd.yIsDate for pd in PD]): - raise Exception('Warn: Cannot compare dates with other values') - - if mode=='nTabs_1Col': - ylabelAll=getErrorLabel(PD[1].sy) - usy = unique([pd.sy for pd in PD]) - #print('Compare - different tabs - 1 col') - st = [pd.st for pd in PD] - if len(usy)==1: - SS=usy[0] + ', '+ ' wrt. '.join(st[::-1]) - if sComp=='Y-Y': - xlabelAll=PD[0].st+', '+PD[0].sy - ylabelAll=PD[1].st+', '+PD[1].sy - else: - SS=' wrt. '.join(usy[::-1]) - if sComp=='Y-Y': - xlabelAll=PD[0].sy - ylabelAll=PD[1].sy - - xRef = PD[0].x - yRef = PD[0].y - PD[1].syl=SS - y=np.interp(xRef,PD[1].x,PD[1].y) - if sComp=='Y-Y': - PD[1].x=yRef - PD[1].y=y - else: - Error = getError(y,yRef,sComp) - PD[1].x=xRef - PD[1].y=Error - PD[1].sx=xlabelAll - PD[1].sy=ylabelAll - PD_comp=[PD[1]] # return - - elif mode=='1Tab_nCols': - # --- Compare one table - different columns - #print('One Tab, different columns') - ylabelAll=getErrorLabel() - xRef = PD[0].x - yRef = PD[0].y - pdRef=PD[0] - for pd in PD[1:]: - if sComp=='Y-Y': - pd.syl = no_unit(pd.sy)+' wrt. '+no_unit(pdRef.sy) - pd.x = yRef - pd.sx = PD[0].sy - else: - pd.syl = no_unit(pd.sy)+' wrt. '+no_unit(pdRef.sy) - pd.sx = xlabelAll - pd.sy = ylabelAll - Error = getError(pd.y,yRef,sComp) - pd.x=xRef - pd.y=Error - PD_comp=PD[1:] - elif mode =='nTabs_SameCols': - # --- Compare different tables, same column - #print('Several Tabs, same columns') - uiy=unique([pd.iy for pd in PD]) - uit=unique([pd.it for pd in PD]) - PD_comp=[] - for iy in uiy: - PD_SameCol=[pd for pd in PD if pd.iy==iy] - xRef = PD_SameCol[0].x - yRef = PD_SameCol[0].y - ylabelAll=getErrorLabel(PD_SameCol[0].sy) - for pd in PD_SameCol[1:]: - if pd.xIsString: - if len(xRef)==len(pd.x): - pass # fine able to interpolate - else: - raise Exception('X values have different length and are strings, cannot interpolate string. Use `Index` for x instead.') - else: - pd.y=np.interp(xRef,pd.x,pd.y) - if sComp=='Y-Y': - pd.x=yRef - pd.sx=PD_SameCol[0].st+', '+PD_SameCol[0].sy - if len(PD_SameCol)==1: - pd.sy =pd.st+', '+pd.sy - else: - pd.syl= pd.st - else: - if len(uit)<=2: - pd.syl = pd.st+' wrt. '+PD_SameCol[0].st+', '+pd.sy - else: - pd.syl = pd.st+'|'+pd.sy - pd.sx = xlabelAll - pd.sy = ylabelAll - Error = getError(pd.y,yRef,sComp) - pd.x=xRef - pd.y=Error - PD_comp.append(pd) - elif mode =='nTabs_SimCols': - # --- Compare different tables, similar columns - print('Several Tabs, similar columns, TODO') - PD_comp=[] - - return PD_comp - +from __future__ import absolute_import +import os +import numpy as np +from .common import no_unit, unit, inverse_unit, has_chinese_char +from .common import isString, isDate, getDt +from .common import unique, pretty_num, pretty_time +from .GUIMeasure import find_closest # Should not depend on wx + +class PlotData(): + """ + Class for plot data + + For now, relies on some "indices" related to Tables/Columns/ and maybe Selection panel + Not really elegant. 
These dependencies should be removed in the future + """ + def __init__(PD, x=None, y=None, sx='', sy=''): + """ Dummy init for now """ + PD.id=-1 + PD.it=-1 # tablx index + PD.ix=-1 # column index + PD.iy=-1 # column index + PD.sx='' # x label + PD.sy='' # y label + PD.st='' # table label + PD.syl='' # y label for legend + PD.filename = '' + PD.tabname = '' + PD.x =[] # x data + PD.y =[] # y data + PD.xIsString=False # true if strings + PD.xIsDate =False # true if dates + PD.yIsString=False # true if strings + PD.yIsDate =False # true if dates + + if x is not None and y is not None: + PD.fromXY(x,y,sx,sy) + + def fromIDs(PD, tabs, i, idx, SameCol, Options={}): + """ Nasty initialization of plot data from "IDs" """ + PD.id = i + PD.it = idx[0] # table index + PD.ix = idx[1] # x index + PD.iy = idx[2] # y index + PD.sx = idx[3] # x label + PD.sy = idx[4] # y label + PD.syl = '' # y label for legend + PD.st = idx[5] # table label + PD.filename = tabs[PD.it].filename + PD.tabname = tabs[PD.it].active_name + PD.SameCol = SameCol + PD.x, PD.xIsString, PD.xIsDate,_ = tabs[PD.it].getColumn(PD.ix) # actual x data, with info + PD.y, PD.yIsString, PD.yIsDate,c = tabs[PD.it].getColumn(PD.iy) # actual y data, with info + PD.c =c # raw values, used by PDF + + PD._post_init(Options=Options) + + def fromXY(PD, x, y, sx='', sy=''): + PD.x = x + PD.y = y + PD.c = y + PD.sx = sx + PD.sy = sy + PD.xIsString = isString(x) + PD.yIsString = isString(y) + PD.xIsDate = isDate (x) + PD.yIsDate = isDate (y) + + PD._post_init() + + + def _post_init(PD, Options={}): + # --- Perform data manipulation on the fly + #print(Options) + keys=Options.keys() + if 'RemoveOutliers' in keys: + if Options['RemoveOutliers']: + from pydatview.tools.signal import reject_outliers + try: + PD.x, PD.y = reject_outliers(PD.y, PD.x, m=Options['OutliersMedianDeviation']) + except: + raise Exception('Warn: Outlier removal failed. Desactivate it or use a different signal. ') + if 'Filter' in keys: + if Options['Filter']: + from pydatview.tools.signal import applyFilter + PD.y = applyFilter(PD.x, PD.y, Options['Filter']) + + if 'Sampler' in keys: + if Options['Sampler']: + from pydatview.tools.signal import applySampler + PD.x, PD.y = applySampler(PD.x, PD.y, Options['Sampler']) + + if 'Binning' in keys: + if Options['Binning']: + if Options['Binning']['active']: + PD.x, PD.y = Options['Binning']['applyCallBack'](PD.x, PD.y, Options['Binning']) + + # --- Store stats + n=len(PD.y) + if n>1000: + if (PD.xIsString): + raise Exception('Error: x values contain more than 1000 string. This is not suitable for plotting.\n\nPlease select another column for table: {}\nProblematic column: {}\n'.format(PD.st,PD.sx)) + if (PD.yIsString): + raise Exception('Error: y values contain more than 1000 string. This is not suitable for plotting.\n\nPlease select another column for table: {}\nProblematic column: {}\n'.format(PD.st,PD.sy)) + + PD.needChineseFont = has_chinese_char(PD.sy) or has_chinese_char(PD.sx) + # Stats of the raw data (computed once and for all, since it can be expensive for large dataset + PD.computeRange() + # Store the values of the original data (labelled "0"), since the data might be modified later by PDF or MinMax etc. 
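The new 'Binning' option handled in `_post_init` above delegates the actual work to `Options['Binning']['applyCallBack'](x, y, opts)`. The real callback ships with the binning plugin added later in this patch; purely for illustration, a compatible callback could look like the hypothetical helper below (not the plugin's implementation):

    import numpy as np

    def bin_xy(x, y, opts):
        """Average y over regular x-bins defined by opts['xMin'], opts['xMax'], opts['nBins']."""
        edges = np.linspace(opts['xMin'], opts['xMax'], opts['nBins'] + 1)
        xmid  = 0.5*(edges[:-1] + edges[1:])
        idx   = np.digitize(x, edges) - 1          # bin index of each sample
        ybin  = np.array([np.nanmean(y[idx == i]) if np.any(idx == i) else np.nan
                          for i in range(opts['nBins'])])
        return xmid, ybin

    # Usage, mirroring the call in _post_init above:
    # opts = {'active': True, 'xMin': 0.0, 'xMax': 10.0, 'nBins': 50, 'applyCallBack': bin_xy}
    # x_new, y_new = opts['applyCallBack'](x, y, opts)
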
+ PD._y0Min = PD._yMin + PD._y0Max = PD._yMax + PD._x0Min = PD._xMin + PD._x0Max = PD._xMax + PD._x0AtYMin = PD._xAtYMin + PD._x0AtYMax = PD._xAtYMax + PD._y0Std = PD.yStd() + PD._y0Mean = PD.yMean() + PD._n0 = (n,'{:d}'.format(n)) + PD.x0 =PD.x + PD.y0 =PD.y + # Store xyMeas input values so we don't need to recompute xyMeas in case they didn't change + PD.xyMeasInput1, PD.xyMeasInput2 = None, None + PD.xyMeas1, PD.xyMeas2 = None, None + + def __repr__(s): + s1='id:{}, it:{}, ix:{}, iy:{}, sx:"{}", sy:"{}", st:{}, syl:{}\n'.format(s.id,s.it,s.ix,s.iy,s.sx,s.sy,s.st,s.syl) + return s1 + + def toPDF(PD, nBins=30, smooth=False): + """ Convert y-data to Probability density function (PDF) as function of x + Uses "stats" library (from welib/pybra) + NOTE: inPlace + """ + from pydatview.tools.stats import pdf_gaussian_kde, pdf_histogram + + n=len(PD.y) + if PD.yIsString: + if n>100: + raise Exception('Warn: Dataset has string format and is too large to display') + vc = PD.c.value_counts().sort_index() + PD.x = vc.keys().tolist() + PD.y = vc/n # TODO counts/PDF option + PD.yIsString=False + PD.xIsString=True + elif PD.yIsDate: + raise Exception('Warn: Cannot plot PDF of dates') + else: + if nBins>=n: + nBins=n + if smooth: + try: + PD.x, PD.y = pdf_gaussian_kde(PD.y, nOut=nBins) + except np.linalg.LinAlgError as e: + PD.x, PD.y = pdf_histogram(PD.y, nBins=nBins, norm=True, count=False) + else: + PD.x, PD.y = pdf_histogram(PD.y, nBins=nBins, norm=True, count=False) + PD.xIsString=False + PD.yIsString=False + + PD.sx = PD.sy; + PD.sy = 'PDF('+no_unit(PD.sy)+')' + iu = inverse_unit(PD.sy) + if len(iu)>0: + PD.sy += ' ['+ iu +']' + + # Compute min max once and for all + PD.computeRange() + + return nBins + + + def toMinMax(PD, xScale=False, yScale=True): + """ Convert plot data to MinMax data based on GUI options + NOTE: inPlace + """ + if yScale: + if PD.yIsString: + raise Exception('Warn: Cannot compute min-max for strings') + mi = PD._y0Min[0] #mi= np.nanmin(PD.y) + mx = PD._y0Max[0] #mx= np.nanmax(PD.y) + if mi == mx: + PD.y=PD.y*0 + else: + PD.y = (PD.y-mi)/(mx-mi) + PD._yMin=0,'0' + PD._yMax=1,'1' + if xScale: + if PD.xIsString: + raise Exception('Warn: Cannot compute min-max for strings') + mi= PD._x0Min[0] + mx= PD._x0Max[0] + if mi == mx: + PD.x=PD.x*0 + else: + PD.x = (PD.x-mi)/(mx-mi) + PD._xMin=0,'0' + PD._xMax=1,'1' + + # Compute min max once and for all + #PD.computeRange() + + return None + + + def toFFT(PD, yType='Amplitude', xType='1/x', avgMethod='Welch', avgWindow='Hamming', bDetrend=True, nExp=8): + """ + Uses spectral.fft_wrap to generate a "FFT" plot data, with various options: + yType : amplitude, PSD, f x PSD + xType : 1/x, x, 2pi/x + avgMethod : None, Welch + avgWindow : Hamming, Hann, Rectangular + see module spectral for more + + NOTE: inplace (modifies itself), does not return a new instance + """ + from pydatview.tools.spectral import fft_wrap + + # --- TODO, make this independent of GUI + if PD.yIsString or PD.yIsDate: + raise Exception('Warn: Cannot plot FFT of dates or strings') + elif PD.xIsString: + raise Exception('Warn: Cannot plot FFT if x axis is string') + + dt=None + if PD.xIsDate: + dt = getDt(PD.x) + # --- Computing fft - x is freq, y is Amplitude + PD.x, PD.y, Info = fft_wrap(PD.x, PD.y, dt=dt, output_type=yType,averaging=avgMethod, averaging_window=avgWindow,detrend=bDetrend,nExp=nExp) + # --- Setting plot options + PD._Info=Info + PD.xIsDate=False + # y label + if yType=='PSD': + PD.sy= 'PSD({}) [({})^2/{}]'.format(no_unit(PD.sy), unit(PD.sy), unit(PD.sx)) 
+ elif yType=='f x PSD': + PD.sy= 'f-weighted PSD({}) [({})^2]'.format(no_unit(PD.sy), unit(PD.sy)) + elif yType=='Amplitude': + PD.sy= 'FFT({}) [{}]'.format(no_unit(PD.sy), unit(PD.sy)) + else: + raise Exception('Unsupported FFT type {} '.format(yType)) + # x label + if xType=='1/x': + if unit(PD.sx)=='s': + PD.sx= 'Frequency [Hz]' + else: + PD.sx= '' + elif xType=='x': + PD.x=1/PD.x + if unit(PD.sx)=='s': + PD.sx= 'Period [s]' + else: + PD.sx= '' + elif xType=='2pi/x': + PD.x=2*np.pi*PD.x + if unit(PD.sx)=='s': + PD.sx= 'Cyclic frequency [rad/s]' + else: + PD.sx= '' + else: + raise Exception('Unsupported x-type {} '.format(xType)) + + PD.computeRange() + return Info + + def computeRange(PD): + """ Compute min max of data once and for all and store + From the performance tests, this ends up having a non negligible cost for large dataset, + so we store it to reuse these as much as possible. + If possible, should be used for the plotting as well, so that matplotlib don't + have to compute them again + NOTE: each variable is a tuple (v,s), with a float and its string representation + """ + PD._xMin = PD._xMinCalc() + PD._xMax = PD._xMaxCalc() + PD._yMin = PD._yMinCalc() + PD._yMax = PD._yMaxCalc() + PD._xAtYMin = PD._xAtYMinCalc(PD._yMin[0]) + PD._xAtYMax = PD._xAtYMaxCalc(PD._yMax[0]) + + + # --------------------------------------------------------------------------------} + # --- Stats functions that should only becalled once, could maybe use @attributes.. + # --------------------------------------------------------------------------------{ + def _yMinCalc(PD): + if PD.yIsString: + return PD.y[0],PD.y[0].strip() + elif PD.yIsDate: + return PD.y[0],'{}'.format(PD.y[0]) + else: + v=np.nanmin(PD.y) + s=pretty_num(v) + return (v,s) + + def _yMaxCalc(PD): + if PD.yIsString: + return PD.y[-1],PD.y[-1].strip() + elif PD.yIsDate: + return PD.y[-1],'{}'.format(PD.y[-1]) + else: + v=np.nanmax(PD.y) + s=pretty_num(v) + return (v,s) + + def _xAtYMinCalc(PD, yMin): + if PD.xIsString: + return PD.x[0],PD.x[0].strip() + elif PD.xIsDate: + return PD.x[0],'{}'.format(PD.x[0]) + else: + try: + v = PD.x[np.where(PD.y == yMin)[0][0]] # Might fail if all nan + except: + v = PD.x[0] + s=pretty_num(v) + return (v,s) + + def _xAtYMaxCalc(PD, yMax): + if PD.xIsString: + return PD.x[-1],PD.x[-1].strip() + elif PD.xIsDate: + return PD.x[-1],'{}'.format(PD.x[-1]) + else: + try: + v = PD.x[np.where(PD.y == yMax)[0][0]] # Might fail if all nan + except: + v = PD.x[0] + s=pretty_num(v) + return (v,s) + + def _xMinCalc(PD): + if PD.xIsString: + return PD.x[0],PD.x[0].strip() + elif PD.xIsDate: + return PD.x[0],'{}'.format(PD.x[0]) + else: + v=np.nanmin(PD.x) + s=pretty_num(v) + return (v,s) + + def _xMaxCalc(PD): + if PD.xIsString: + return PD.x[-1],PD.x[-1].strip() + elif PD.xIsDate: + return PD.x[-1],'{}'.format(PD.x[-1]) + else: + v=np.nanmax(PD.x) + s=pretty_num(v) + return (v,s) + + def xMin(PD): + return PD._xMin + + def xMax(PD): + return PD._xMax + + def xAtYMin(PD): + return PD._xAtYMin + + def xAtYMax(PD): + return PD._xAtYMax + + def yMin(PD): + return PD._yMin + + def yMax(PD): + return PD._yMax + + def y0Min(PD): + return PD._y0Min + + def y0Max(PD): + return PD._y0Max + + def y0Mean(PD): + return PD._y0Mean + + def y0Std(PD): + return PD._y0Std + + def n0(PD): + return PD._n0 + + # --------------------------------------------------------------------------------} + # --- Stats functions + # --------------------------------------------------------------------------------{ + def yMean(PD): + if PD.yIsString or 
PD.yIsDate: + return None,'NA' + else: + v=np.nanmean(PD.y) + s=pretty_num(v) + return (v,s) + + def yMedian(PD): + if PD.yIsString or PD.yIsDate: + return None,'NA' + else: + v=np.nanmedian(PD.y) + s=pretty_num(v) + return (v,s) + + def yStd(PD): + if PD.yIsString or PD.yIsDate: + return None,'NA' + else: + v=np.nanstd(PD.y) + s=pretty_num(v) + return (v,s) + + def yName(PD): + return PD.sy, PD.sy + + def fileName(PD): + return os.path.basename(PD.filename), os.path.basename(PD.filename) + + def baseDir(PD): + return os.path.dirname(PD.filename),os.path.join(os.path.dirname(PD.filename),'') + + def tabName(PD): + return PD.tabname, PD.tabname + + def ylen(PD): + v=len(PD.y) + s='{:d}'.format(v) + return v,s + + + def y0Var(PD): + if PD._y0Std[0] is not None: + v=PD._y0Std[0]**2 + s=pretty_num(v) + else: + v=None + s='NA' + return v,s + + def y0TI(PD): + v=PD._y0Std[0]/PD._y0Mean[0] + s=pretty_num(v) + return v,s + + + def yRange(PD): + if PD.yIsString: + return 'NA','NA' + elif PD.yIsDate: + dtAll=getDt([PD.x[-1]-PD.x[0]]) + return '',pretty_time(dtAll) + else: + v=np.nanmax(PD.y)-np.nanmin(PD.y) + s=pretty_num(v) + return v,s + + def yAbsMax(PD): + if PD.yIsString or PD.yIsDate: + return 'NA','NA' + else: + v=max(np.abs(PD._y0Min[0]),np.abs(PD._y0Max[0])) + s=pretty_num(v) + return v,s + + + def xRange(PD): + if PD.xIsString: + return 'NA','NA' + elif PD.xIsDate: + dtAll=getDt([PD.x[-1]-PD.x[0]]) + return '',pretty_time(dtAll) + else: + v=np.nanmax(PD.x)-np.nanmin(PD.x) + s=pretty_num(v) + return v,s + + + def inty(PD): + if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: + return None,'NA' + else: + v=np.trapz(y=PD.y,x=PD.x) + s=pretty_num(v) + return v,s + + def intyintdx(PD): + if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: + return None,'NA' + else: + v=np.trapz(y=PD.y,x=PD.x)/np.trapz(y=PD.x*0+1,x=PD.x) + s=pretty_num(v) + return v,s + + def intyx1(PD): + if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: + return None,'NA' + else: + v=np.trapz(y=PD.y*PD.x,x=PD.x) + s=pretty_num(v) + return v,s + + def intyx1_scaled(PD): + if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: + return None,'NA' + else: + v=np.trapz(y=PD.y*PD.x,x=PD.x) + v=v/np.trapz(y=PD.y,x=PD.x) + s=pretty_num(v) + return v,s + + def intyx2(PD): + if PD.yIsString or PD.yIsDate or PD.xIsString or PD.xIsDate: + return None,'NA' + else: + v=np.trapz(y=PD.y*PD.x**2,x=PD.x) + s=pretty_num(v) + return v,s + + def meas1(PD, xymeas1, xymeas2): + if PD.xyMeasInput1 is not None and PD.xyMeasInput1 == xymeas1: + yv = PD.xyMeas1[1] + s = pretty_num(yv) + else: + xv, yv, s = PD._meas(xymeas1) + PD.xyMeas1 = [xv, yv] + PD.xyMeasInput1 = xymeas1 + return yv, s + + def meas2(PD, xymeas1, xymeas2): + if PD.xyMeasInput2 is not None and PD.xyMeasInput2 == xymeas2: + yv = PD.xyMeas2[1] + s = pretty_num(yv) + else: + xv, yv, s = PD._meas(xymeas2) + PD.xyMeas2 = [xv, yv] + PD.xyMeasInput2 = xymeas2 + return yv, s + + def yMeanMeas(PD): + return PD._measCalc('mean') + + def yMinMeas(PD): + return PD._measCalc('min') + + def yMaxMeas(PD): + return PD._measCalc('max') + + def xAtYMinMeas(PD): + return PD._measCalc('xmin') + + def xAtYMaxMeas(PD): + return PD._measCalc('xmax') + + def _meas(PD, xymeas): + try: + xv, yv = 'NA', 'NA' + xy = np.array([PD.x, PD.y]).transpose() + points = find_closest(xy, [xymeas[0], xymeas[1]], False) + if points.ndim == 1: + xv, yv = points[0:2] + s = pretty_num(yv) + else: + xv, yv = points[0, 0], points[0, 1] + s = ' / '.join([str(p) for p in points[:, 1]]) + except 
(IndexError, TypeError): + xv, yv = 'NA', 'NA' + s='NA' + return xv, yv, s + + def _measCalc(PD, mode): + if PD.xyMeas1 is None or PD.xyMeas2 is None: + return 'NA', 'NA' + try: + v = 'NA' + left_index = np.where(PD.x == PD.xyMeas1[0])[0][0] + right_index = np.where(PD.x == PD.xyMeas2[0])[0][0] + if left_index == right_index: + raise IndexError + if left_index > right_index: + left_index, right_index = right_index, left_index + if mode == 'mean': + v = np.nanmean(PD.y[left_index:right_index]) + elif mode == 'min': + v = np.nanmin(PD.y[left_index:right_index]) + elif mode == 'max': + v = np.nanmax(PD.y[left_index:right_index]) + elif mode == 'xmin': + v = PD.x[left_index + np.where(PD.y[left_index:right_index] == np.nanmin(PD.y[left_index:right_index]))[0][0]] + elif mode == 'xmax': + v = PD.x[left_index + np.where(PD.y[left_index:right_index] == np.nanmax(PD.y[left_index:right_index]))[0][0]] + else: + raise NotImplementedError('Error: Mode ' + mode + ' not implemented') + s = pretty_num(v) + except (IndexError, TypeError): + v = 'NA' + s = 'NA' + return v, s + + def dx(PD): + if len(PD.x)<=1: + return 'NA','NA' + if PD.xIsString: + return None,'NA' + elif PD.xIsDate: + dt=getDt(PD.x) + return dt,pretty_time(dt) + else: + v=PD.x[1]-PD.x[0] + s=pretty_num(v) + return v,s + + def xMax(PD): + if PD.xIsString: + return PD.x[-1],PD.x[-1] + elif PD.xIsDate: + return PD.x[-1],'{}'.format(PD.x[-1]) + else: + v=np.nanmax(PD.x) + s=pretty_num(v) + return v,s + def xMin(PD): + if PD.xIsString: + return PD.x[0],PD.x[0] + elif PD.xIsDate: + return PD.x[0],'{}'.format(PD.x[0]) + else: + v=np.nanmin(PD.x) + s=pretty_num(v) + return v,s + + def leq(PD,m): + from pydatview.tools.fatigue import eq_load + if PD.yIsString or PD.yIsDate: + return 'NA','NA' + else: + T,_=PD.xRange() + v=eq_load(PD.y, m=m, neq=T)[0][0] + return v,pretty_num(v) + + def Info(PD,var): + if var=='LSeg': + return '','{:d}'.format(PD._Info.LSeg) + elif var=='LWin': + return '','{:d}'.format(PD._Info.LWin) + elif var=='LOvlp': + return '','{:d}'.format(PD._Info.LOvlp) + elif var=='nFFT': + return '','{:d}'.format(PD._Info.nFFT) + + +# --------------------------------------------------------------------------------} +# --- +# --------------------------------------------------------------------------------{ +def compareMultiplePD(PD, mode, sComp): + """ + PD: list of PlotData + sComp: string in ['Relative', '|Relative|', 'Ratio', 'Absolute' + mode: plot mode, nTabs_1Col, nTabs_SameCols, nTabs_SimCols + + return: + PD_comp : new PlotData list that compares the input list PD + + """ + # --- Helper function + def getError(y,yref,method): + if len(y)!=len(yref): + raise NotImplementedError('Cannot compare signals of different lengths') + if sComp=='Relative': + if np.mean(np.abs(yref))<1e-7: + Error=(y-yRef)/(yRef+1)*100 + else: + Error=(y-yRef)/yRef*100 + elif sComp=='|Relative|': + if np.mean(np.abs(yref))<1e-7: + Error=abs((y-yRef)/(yRef+1))*100 + else: + Error=abs((y-yRef)/yRef)*100 + elif sComp=='Ratio': + if np.mean(np.abs(yref))<1e-7: + Error=(y+1)/(yRef+1) + else: + Error=y/yRef + elif sComp=='Absolute': + Error=y-yRef + else: + raise Exception('Something wrong '+sComp) + return Error + + def getErrorLabel(ylab=''): + if len(ylab)>0: + ylab=no_unit(ylab) + ylab='in '+ylab+' ' + if sComp=='Relative': + return 'Relative error '+ylab+'[%]'; + elif sComp=='|Relative|': + return 'Abs. 
relative error '+ylab+'[%]'; + if sComp=='Ratio': + return 'Ratio '+ylab.replace('in','of')+'[-]'; + elif sComp=='Absolute': + usy = unique([pd.sy for pd in PD]) + yunits= unique([unit(sy) for sy in usy]) + if len(yunits)==1 and len(yunits[0])>0: + return 'Absolute error '+ylab+'['+yunits[0]+']' + else: + return 'Absolute error '+ylab; + elif sComp=='Y-Y': + return PD[0].sy + + xlabelAll=PD[0].sx + + + if any([pd.yIsString for pd in PD]): + raise Exception('Warn: Cannot compare strings') + if any([pd.yIsDate for pd in PD]): + raise Exception('Warn: Cannot compare dates with other values') + + if mode=='nTabs_1Col': + ylabelAll=getErrorLabel(PD[1].sy) + usy = unique([pd.sy for pd in PD]) + #print('Compare - different tabs - 1 col') + st = [pd.st for pd in PD] + if len(usy)==1: + SS=usy[0] + ', '+ ' wrt. '.join(st[::-1]) + if sComp=='Y-Y': + xlabelAll=PD[0].st+', '+PD[0].sy + ylabelAll=PD[1].st+', '+PD[1].sy + else: + SS=' wrt. '.join(usy[::-1]) + if sComp=='Y-Y': + xlabelAll=PD[0].sy + ylabelAll=PD[1].sy + + xRef = PD[0].x + yRef = PD[0].y + PD[1].syl=SS + y=np.interp(xRef,PD[1].x,PD[1].y) + if sComp=='Y-Y': + PD[1].x=yRef + PD[1].y=y + else: + Error = getError(y,yRef,sComp) + PD[1].x=xRef + PD[1].y=Error + PD[1].sx=xlabelAll + PD[1].sy=ylabelAll + PD_comp=[PD[1]] # return + + elif mode=='1Tab_nCols': + # --- Compare one table - different columns + #print('One Tab, different columns') + ylabelAll=getErrorLabel() + xRef = PD[0].x + yRef = PD[0].y + pdRef=PD[0] + for pd in PD[1:]: + if sComp=='Y-Y': + pd.syl = no_unit(pd.sy)+' wrt. '+no_unit(pdRef.sy) + pd.x = yRef + pd.sx = PD[0].sy + else: + pd.syl = no_unit(pd.sy)+' wrt. '+no_unit(pdRef.sy) + pd.sx = xlabelAll + pd.sy = ylabelAll + Error = getError(pd.y,yRef,sComp) + pd.x=xRef + pd.y=Error + PD_comp=PD[1:] + elif mode =='nTabs_SameCols': + # --- Compare different tables, same column + #print('Several Tabs, same columns') + uiy=unique([pd.iy for pd in PD]) + uit=unique([pd.it for pd in PD]) + PD_comp=[] + for iy in uiy: + PD_SameCol=[pd for pd in PD if pd.iy==iy] + xRef = PD_SameCol[0].x + yRef = PD_SameCol[0].y + ylabelAll=getErrorLabel(PD_SameCol[0].sy) + for pd in PD_SameCol[1:]: + if pd.xIsString: + if len(xRef)==len(pd.x): + pass # fine able to interpolate + else: + raise Exception('X values have different length and are strings, cannot interpolate string. Use `Index` for x instead.') + else: + pd.y=np.interp(xRef,pd.x,pd.y) + if sComp=='Y-Y': + pd.x=yRef + pd.sx=PD_SameCol[0].st+', '+PD_SameCol[0].sy + if len(PD_SameCol)==1: + pd.sy =pd.st+', '+pd.sy + else: + pd.syl= pd.st + else: + if len(uit)<=2: + pd.syl = pd.st+' wrt. 
'+PD_SameCol[0].st+', '+pd.sy + else: + pd.syl = pd.st+'|'+pd.sy + pd.sx = xlabelAll + pd.sy = ylabelAll + Error = getError(pd.y,yRef,sComp) + pd.x=xRef + pd.y=Error + PD_comp.append(pd) + elif mode =='nTabs_SimCols': + # --- Compare different tables, similar columns + print('Several Tabs, similar columns, TODO') + PD_comp=[] + + return PD_comp + diff --git a/pydatview/plugins/__init__.py b/pydatview/plugins/__init__.py index 0e5b4b1..a4d272c 100644 --- a/pydatview/plugins/__init__.py +++ b/pydatview/plugins/__init__.py @@ -18,7 +18,13 @@ def _data_standardizeUnits(mainframe, event=None, label=''): from .data_standardizeUnits import standardizeUnitsPlugin standardizeUnitsPlugin(mainframe, event, label) +def _data_binning(mainframe, event=None, label=''): + from .data_binning import BinningToolPanel + return BinningToolPanel + + dataPlugins=[ - ('Standardize Units (SI)', _data_standardizeUnits), - ('Standardize Units (WE)', _data_standardizeUnits), + ('Bin data' , _data_binning , True ), + ('Standardize Units (SI)', _data_standardizeUnits, False), + ('Standardize Units (WE)', _data_standardizeUnits, False), ] diff --git a/pydatview/plugins/data_binning.py b/pydatview/plugins/data_binning.py new file mode 100644 index 0000000..22a1c25 --- /dev/null +++ b/pydatview/plugins/data_binning.py @@ -0,0 +1,307 @@ +import wx +import numpy as np +import pandas as pd +# import copy +# import platform +# from collections import OrderedDict +# For log dec tool +from pydatview.GUITools import GUIToolPanel, TOOL_BORDER +from pydatview.common import CHAR, Error, Info, pretty_num_short +from pydatview.plotdata import PlotData +# from pydatview.tools.damping import logDecFromDecay +# from pydatview.tools.curve_fitting import model_fit, extract_key_miscnum, extract_key_num, MODELS, FITTERS, set_common_keys + + +# --------------------------------------------------------------------------------} +# --- GUI +# --------------------------------------------------------------------------------{ +class BinningToolPanel(GUIToolPanel): + def __init__(self, parent): + super(BinningToolPanel,self).__init__(parent) + + # --- Data from other modules + self.parent = parent # parent is GUIPlotPanel + # Getting states from parent + if 'Binning' not in self.parent.plotDataOptions.keys() or self.parent.plotDataOptions['Binning'] is None: + self.parent.plotDataOptions['Binning'] =_DEFAULT_DICT.copy() + self.data = self.parent.plotDataOptions['Binning'] + self.data['selectionChangeCallBack'] = self.selectionChange + + + # --- GUI elements + self.btClose = self.getBtBitmap(self, 'Close','close', self.destroy) + self.btAdd = self.getBtBitmap(self, 'Add','add' , self.onAdd) + self.btHelp = self.getBtBitmap(self, 'Help','help', self.onHelp) + self.btClear = self.getBtBitmap(self, 'Clear Plot','sun', self.onClear) + + #self.lb = wx.StaticText( self, -1, """ Click help """) + self.cbTabs = wx.ComboBox(self, -1, choices=[], style=wx.CB_READONLY) + self.scBins = wx.SpinCtrl(self, value='50', style=wx.TE_RIGHT, size=wx.Size(60,-1) ) + self.textXMin = wx.TextCtrl(self, wx.ID_ANY, '', style = wx.TE_PROCESS_ENTER|wx.TE_RIGHT, size=wx.Size(70,-1)) + self.textXMax = wx.TextCtrl(self, wx.ID_ANY, '', style = wx.TE_PROCESS_ENTER|wx.TE_RIGHT, size=wx.Size(70,-1)) + self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) + self.btApply = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleApply) + self.btXRange = self.getBtBitmap(self, 'Default','compute', self.reset) + self.lbDX = wx.StaticText(self, -1, '') + 
self.scBins.SetRange(3, 10000) + + boldFont = self.GetFont().Bold() + lbInputs = wx.StaticText(self, -1, 'Inputs: ') + lbInputs.SetFont(boldFont) + + # --- Layout + btSizer = wx.FlexGridSizer(rows=3, cols=2, hgap=2, vgap=0) + btSizer.Add(self.btClose , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btClear , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btAdd , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btPlot , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btHelp , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btApply , 0, flag = wx.ALL|wx.EXPAND, border = 1) + + msizer = wx.FlexGridSizer(rows=1, cols=3, hgap=2, vgap=0) + msizer.Add(wx.StaticText(self, -1, 'Table:') , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) + msizer.Add(self.cbTabs , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM, 1) +# msizer.Add(self.btXRange , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.TOP|wx.BOTTOM|wx.LEFT, 1) + + msizer2 = wx.FlexGridSizer(rows=2, cols=5, hgap=2, vgap=1) + + msizer2.Add(lbInputs , 0, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL , 0) + msizer2.Add(wx.StaticText(self, -1, '#bins: ') , 0, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL , 1) + msizer2.Add(self.scBins , 1, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL, 1) + msizer2.Add(wx.StaticText(self, -1, 'dx: ') , 0, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL , 8) + msizer2.Add(self.lbDX , 1, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.EXPAND, 1) + msizer2.Add(self.btXRange , 0, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL , 0) + msizer2.Add(wx.StaticText(self, -1, 'xmin: ') , 0, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL , 1) + msizer2.Add(self.textXMin , 1, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.EXPAND, 1) + msizer2.Add(wx.StaticText(self, -1, 'xmax: ') , 0, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL , 8) + msizer2.Add(self.textXMax , 1, wx.LEFT|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.EXPAND, 1) + #msizer2.AddGrowableCol(4,1) + + vsizer = wx.BoxSizer(wx.VERTICAL) + vsizer.Add(msizer,0, flag = wx.TOP ,border = 1) + vsizer.Add(msizer2,0, flag = wx.TOP|wx.EXPAND ,border = 1) + + + self.sizer = wx.BoxSizer(wx.HORIZONTAL) + self.sizer.Add(btSizer ,0, flag = wx.LEFT ,border = 5) + self.sizer.Add(vsizer ,1, flag = wx.LEFT|wx.EXPAND ,border = TOOL_BORDER) + #self.sizer.Add(msizer ,1, flag = wx.LEFT|wx.EXPAND ,border = TOOL_BORDER) + self.SetSizer(self.sizer) + + # --- Events + self.scBins.Bind(wx.EVT_TEXT, self.onParamChange) + self.cbTabs.Bind (wx.EVT_COMBOBOX, self.onTabChange) + self.textXMin.Bind(wx.EVT_TEXT_ENTER, self.onParamChange) + self.textXMax.Bind(wx.EVT_TEXT_ENTER, self.onParamChange) + + # --- Init triggers + if self.data['active']: + self.setXRange(x=[self.data['xMin'], self.data['xMax']]) + else: + self.setXRange() + self.scBins.SetValue(self.data['nBins']) + self.onToggleApply(init=True) + self.updateTabList() + self.onParamChange() + + def reset(self, event=None): + self.setXRange() + self.updateTabList() # might as well until we add a nice callback/button.. 
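(For context, the binning that this panel triggers boils down to averaging y over consecutive x intervals between xMin and xMax; the helpers bin_plot/bin_signal further down delegate to bin_DF in pydatview.tools.stats. Below is a minimal numpy sketch of that operation, using made-up example data and variable names chosen to mirror the GUI fields; it is an illustration, not the actual implementation:

    import numpy as np

    x = np.linspace(0, 10, 500)                       # example signal
    y = np.sin(x) + 0.1*np.random.randn(500)
    xMin, xMax, nBins = 0.0, 10.0, 50                 # values the GUI fields would provide

    xBins = np.linspace(xMin, xMax, nBins+1)          # bin edges, as in bin_plot below
    iBin  = np.digitize(x, xBins)                     # bin index (1..nBins) of each sample
    x_new = 0.5*(xBins[:-1] + xBins[1:])              # bin centers
    y_new = np.array([np.nanmean(y[iBin == i]) if np.any(iBin == i) else np.nan
                      for i in range(1, nBins+1)])    # average of y within each bin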
+ + def setXRange(self, x=None): + if x is None: + x= self.parent.plotData[0].x0 + xmin, xmax = np.nanmin(x), np.nanmax(x) + self.textXMin.SetValue(pretty_num_short(xmin)) + self.textXMax.SetValue(pretty_num_short(xmax)) + + def onParamChange(self, event=None): + self._GUI2Data() + self.lbDX.SetLabel(pretty_num_short((self.data['xMax']- self.data['xMin'])/self.data['nBins'])) + + if self.data['active']: + self.parent.load_and_draw() # Data will change + + def selectionChange(self): + """ function called if user change tables/columns""" + print('>>> Binning selectionChange callback, TODO') + self.setXRange() + + def _GUI2Data(self): + def zero_if_empty(s): + return 0 if len(s)==0 else s + self.data['nBins'] = int (self.scBins.Value) + self.data['xMin'] = float(zero_if_empty(self.textXMin.Value)) + self.data['xMax'] = float(zero_if_empty(self.textXMax.Value)) + + def onToggleApply(self, event=None, init=False): + """ + apply sampler based on GUI Data + """ + if not init: + self.data['active'] = not self.data['active'] + + if self.data['active']: + self._GUI2Data() + self.btPlot.Enable(False) + self.btClear.Enable(False) + self.btApply.SetLabel(CHAR['sun']+' Clear') + else: + self.parent.plotDataOptions['Binning'] = None + self.btPlot.Enable(True) + self.btClear.Enable(True) + self.btApply.SetLabel(CHAR['cloud']+' Apply') + + if not init: + self.parent.plotDataOptions['Binning'] = self.data + self.parent.load_and_draw() # Data will change based on plotData + + + def onAdd(self,event=None): + from pydatview.tools.stats import bin_DF + iSel = self.cbTabs.GetSelection() + tabList = self.parent.selPanel.tabList + mainframe = self.parent.mainframe + icol, colname = self.parent.selPanel.xCol + if self.parent.selPanel.currentMode=='simColumnsMode': + # The difficulty here is that we have to use + # self.parent.selPanel.IKeepPerTab + # or maybe just do it for the first table to get the x column name, + # but there is no guarantee that other tables will have the exact same column name. + Error(self, 'Cannot add tables in "simColumnsMode" for now. Go back to 1 table mode, and add tables individually.') + return + if icol==0: + Error(self, 'Cannot resample based on index') + return + + self._GUI2Data() + errors=[] + + if iSel==0: + # Looping on all tables and adding new table + dfs_new = [] + names_new = [] + for itab, tab in enumerate(tabList): + df_new, name_new = bin_tab(tab, icol, colname, self.data, bAdd=True) + if df_new is not None: + # we don't append when string is empty + dfs_new.append(df_new) + names_new.append(name_new) + else: + errors.append(tab.active_name) + mainframe.load_dfs(dfs_new, names_new, bAdd=True) + else: + tab = tabList.get(iSel-1) + df_new, name_new = bin_tab(tab, icol, colname, self.data, bAdd=True) + if df_new is not None: + mainframe.load_df(df_new, name_new, bAdd=True) + else: + errors.append(tab.active_name) + self.updateTabList() + + if len(errors)>0: + Error(self, 'The binning failed on some tables:\n\n'+'\n'.join(errors)) + return + + def onPlot(self,event=None): + if len(self.parent.plotData)!=1: + Error(self,'Plotting only works for a single plot. 
Plot less data.')
+            return
+        self._GUI2Data()
+        PD = self.parent.plotData[0]
+        x_new, y_new = bin_plot(PD.x0, PD.y0, self.data)
+
+        ax = self.parent.fig.axes[0]
+        PD_new = PlotData()
+        PD_new.fromXY(x_new, y_new)
+        self.parent.transformPlotData(PD_new)
+        ax.plot(PD_new.x, PD_new.y, '-')
+        self.parent.canvas.draw()
+
+    def onClear(self,event=None):
+        self.parent.load_and_draw() # Data will change
+        # Update Table list
+        self.updateTabList()
+
+    def onTabChange(self,event=None):
+        #tabList = self.parent.selPanel.tabList
+        #iSel=self.cbTabs.GetSelection()
+        pass
+
+    def updateTabList(self,event=None):
+        tabList = self.parent.selPanel.tabList
+        tabListNames = ['All opened tables']+tabList.getDisplayTabNames()
+        try:
+            iSel=np.max([np.min([self.cbTabs.GetSelection(),len(tabListNames)]),0])
+            self.cbTabs.Clear()
+            [self.cbTabs.Append(tn) for tn in tabListNames]
+            self.cbTabs.SetSelection(iSel)
+        except RuntimeError:
+            pass
+
+    def onHelp(self,event=None):
+        Info(self,"""Binning.
+
+The binning operation computes average y values for a set of x ranges.
+
+To bin, perform the following steps:
+
+- Specify the number of bins (#bins)
+- Specify the min and max of the x values (or click on "Default")
+
+- Click on one of the following buttons:
+   - Plot: will display the binned data on the figure
+   - Apply: will perform the binning on the fly for all new plots
+           (click on Clear to stop applying)
+   - Add: will create new table(s) with binned values for all
+          signals. This process might take some time.
+          Select a table or choose all (default)
+""")
+
+
+# --------------------------------------------------------------------------------}
+# --- DATA
+# --------------------------------------------------------------------------------{
+def bin_plot(x, y, opts):
+    from pydatview.tools.stats import bin_signal
+    xBins = np.linspace(opts['xMin'], opts['xMax'], opts['nBins']+1)
+    if xBins[0]>xBins[1]:
+        raise Exception('xmin must be lower than xmax')
+    x_new, y_new = bin_signal(x, y, xbins=xBins)
+    return x_new, y_new
+
+def bin_tab(tab, iCol, colName, opts, bAdd=True):
+    # TODO, make it such as it's only handling a dataframe instead of a table
+    from pydatview.tools.stats import bin_DF
+    colName = tab.data.columns[iCol-1]
+    error=''
+    xBins = np.linspace(opts['xMin'], opts['xMax'], opts['nBins']+1)
+#    try:
+    df_new =bin_DF(tab.data, xbins=xBins, colBin=colName)
+    # Setting bin column as first columns
+    colNames = list(df_new.columns.values)
+    colNames.remove(colName)
+    colNames.insert(0, colName)
+    df_new=df_new.reindex(columns=colNames)
+    if bAdd:
+        name_new=tab.raw_name+'_binned'
+    else:
+        name_new=None
+    tab.data=df_new
+#    except:
+#        df_new   = None
+#        name_new = None
+
+    return df_new, name_new
+
+
+_DEFAULT_DICT={
+    'active':False,
+    'xMin':None,
+    'xMax':None,
+    'nBins':50,
+    'dx':0,
+    'applyCallBack':bin_plot,
+    'selectionChangeCallBack':None,
+}
+
diff --git a/pydatview/tools/stats.py b/pydatview/tools/stats.py
index 1d96848..90dd910 100644
--- a/pydatview/tools/stats.py
+++ b/pydatview/tools/stats.py
@@ -174,6 +174,27 @@ def bin_DF(df, xbins, colBin, stats='mean'):
     df2 = df2.reindex(xmid)
     return df2
 
+def bin_signal(x, y, xbins=None, stats='mean', nBins=None):
+    """
+    Perform bin averaging of a signal: average y over bins of x
+    INPUTS:
+      - x, y  : arrays of the same length
+      - xbins : end points delimiting the bins, array of ascending x values
+      - stats, nBins: statistic over each bin ('mean' by default), and number of bins used to build xbins when xbins is not provided
+    OUTPUTS:
+       x_new, y_new: binned x and y values (one value per bin)
+
+    """
+    if xbins is None:
+        xmin, xmax = np.min(x), 
np.max(x) + dx = (xmax-xmin)/nBins + xbins=np.arange(xmin, xmax+dx/2, dx) + df = pd.DataFrame(data=np.column_stack((x,y)), columns=['x','y']) + df2 = bin_DF(df, xbins, colBin='x', stats=stats) + return df2['x'].values, df2['y'].values + + + def azimuthal_average_DF(df, psiBin=np.arange(0,360+1,10), colPsi='Azimuth_[deg]', tStart=None, colTime='Time_[s]'): """ Average a dataframe based on azimuthal value From 958d4bbd62012a894ae66dffe4000935f3744579 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Thu, 3 Feb 2022 17:19:04 -0700 Subject: [PATCH 22/36] Update of weio/welib --- pydatview/fast/postpro.py | 108 +++++++++++++++++++++++++++++++++----- pydatview/fast/runner.py | 30 ++++++----- pydatview/tools/stats.py | 31 ++++++++--- weio | 2 +- 4 files changed, 138 insertions(+), 33 deletions(-) diff --git a/pydatview/fast/postpro.py b/pydatview/fast/postpro.py index d27a397..8555d11 100644 --- a/pydatview/fast/postpro.py +++ b/pydatview/fast/postpro.py @@ -651,7 +651,7 @@ def insert_extra_columns_AD(dfRad, tsAvg, vr=None, rho=None, R=None, nB=None, ch def spanwisePostPro(FST_In=None,avgMethod='constantwindow',avgParam=5,out_ext='.outb',df=None): """ - Postprocess FAST radial data + Postprocess FAST radial data. Average the time series, return a dataframe nr x nColumns INPUTS: - FST_IN: Fast .fst input file @@ -721,7 +721,7 @@ def spanwisePostPro(FST_In=None,avgMethod='constantwindow',avgParam=5,out_ext='. def spanwisePostProRows(df, FST_In=None): """ - Returns a 3D matrix: n x nSpan x nColumn where df is of size n x nColumn + Returns a 3D matrix: n x nSpan x nColumn where df is of size n x mColumn NOTE: this is really not optimal. Spanwise columns should be extracted only once.. """ @@ -746,6 +746,7 @@ def spanwisePostProRows(df, FST_In=None): try: rho = fst.AD['AirDens'] except: + print('[WARN] Using default air density (1.225)') pass # --- Extract radial data for each azimuthal average M_AD=None @@ -1034,7 +1035,7 @@ def _zero_crossings(y,x=None,direction=None): raise Exception('Direction should be either `up` or `down`') return xzc, iBef, sign -def find_matching_pattern(List, pattern): +def find_matching_pattern(List, pattern, sort=False): """ Return elements of a list of strings that match a pattern and return the first matching group """ @@ -1049,11 +1050,47 @@ def find_matching_pattern(List, pattern): MatchedStrings.append(match.groups(1)[0]) else: MatchedStrings.append('') + + if sort: + # Sorting by Matched string, NOTE: assumes that MatchedStrings are int. + # that's probably not necessary since alphabetical/integer sorting should be the same + # but it might be useful if number of leading zero differs, which would skew the sorting.. + MatchedElements = np.asarray(MatchedElements) + MatchedStrings = np.asarray(MatchedStrings) + Idx = np.array([int(s) for s in MatchedStrings]) + Isort = np.argsort(Idx) + Idx = Idx[Isort] + MatchedElements = MatchedElements[Isort] + MatchedStrings = MatchedStrings[Isort] + return MatchedElements, MatchedStrings +def extractSpanTS(df, pattern): + r""" + Extract spanwise time series of a given "type" (e.g. Cl for each radial node) + Return a dataframe of size nt x nr + + NOTE: time is not inserted in the output dataframe + + To find "r" use FASTRadialOutputs, it is different for AeroDyn/ElastoDyn/BeamDyn/ + There is no guarantee that the number of columns matching pattern will exactly + corresponds to the number of radial stations. That's the responsability of the + OpenFAST user. 
+ + INPUTS: + - df : a dataframe of size nt x nColumns + - pattern: Pattern used to find "radial" columns amongst the dataframe columns + r'B1N(\d*)Cl_\[-\]' + r'^AB1N(\d*)Cl_\[-\]' -> to match AB1N001Cl_[-], AB1N002Cl_[-], etc. + OUTPUTS: + - dfOut : a dataframe of size nt x nr where nr is the number of radial stations matching the pattern. The radial stations are sorted. + """ + cols, sIdx = find_matching_pattern(df.columns, pattern, sort=True) + return df[cols] + -def extractSpanTSReg(ts, col_pattern, colname, IR=None): +def _extractSpanTSReg_Legacy(ts, col_pattern, colname, IR=None): r""" Helper function to extract spanwise results, like B1N1Cl B1N2Cl etc. Example @@ -1061,17 +1098,10 @@ def extractSpanTSReg(ts, col_pattern, colname, IR=None): colname : r'B1Cl_[-]' """ # Extracting columns matching pattern - cols, sIdx = find_matching_pattern(ts.keys(), col_pattern) + cols, sIdx = find_matching_pattern(ts.keys(), col_pattern, sort=True) if len(cols) ==0: return (None,None) - # Sorting by ID - cols = np.asarray(cols) - Idx = np.array([int(s) for s in sIdx]) - Isort = np.argsort(Idx) - Idx = Idx[Isort] - cols = cols[Isort] - nrMax = np.max(Idx) Values = np.zeros((nrMax,1)) Values[:] = np.nan @@ -1091,7 +1121,7 @@ def extractSpanTSReg(ts, col_pattern, colname, IR=None): print('[WARN] More values found for {}, found {}/{}'.format(colname,len(cols),nrMax)) return (colname,Values) -def extractSpanTS(ts, nr, col_pattern, colname, IR=None): +def _extractSpanTS_Legacy(ts, nr, col_pattern, colname, IR=None): """ Helper function to extract spanwise results, like B1N1Cl B1N2Cl etc. Example @@ -1363,5 +1393,57 @@ def averagePostPro(outFiles,avgMethod='periods',avgParam=None,ColMap=None,ColKee return result +def integrateMoment(r, F): + """ + Integrate moment from force and radial station + M_j = \int_{r_j}^(r_n) f(r) * (r-r_j) dr for j=1,nr + TODO: integrate analytically the "r" part + """ + M = np.zeros(len(r)-1) + for ir,_ in enumerate(r[:-1]): + M[ir] = np.trapz(F[ir:]*(r[ir:]-r[ir]), r[ir:]-r[ir]) + return M + +def integrateMomentTS(r, F): + """ + Integrate moment from time series of forces at nr radial stations + + Compute + M_j = \int_{r_j}^(r_n) f(r) * (r-r_j) dr for j=1,nr + M_j = \int_{r_j}^(r_n) f(r) *r*dr - r_j * \int_(r_j}^{r_n} f(r) dr + j are the columns of M + + NOTE: simply trapezoidal integration is used. + The "r" term is not integrated analytically. This can be improved! 
+ + INPUTS: + - r: array of size nr, of radial stations (ordered) + - F: array nt x nr of time series of forces at each radial stations + OUTPUTS: + - M: array nt x nr of integrated moment at each radial station + + """ + import scipy.integrate as si + # Compute \int_{r_j}^{r_n} f(r) dr, with "j" each column + IF = np.fliplr(-si.cumtrapz(np.fliplr(F), r[-1::-1])) + # Compute \int_{r_j}^{r_n} f(r)*r dr, with "j" each column + FR = F * r + IFR = np.fliplr(-si.cumtrapz(np.fliplr(FR), r[-1::-1])) + # Compute x_j * \int_{r_j}^(r_n) f(r) * r dr + R_IF = IF * r[:-1] + # \int_{r_j}^(r_n) f(r) * (r-r_j) dr = IF + IFR + M = IFR - R_IF + + + # --- Sanity checks + M0 = integrateMoment(r, F[0,:]) + Mm1 = integrateMoment(r, F[-1,:]) + if np.max(np.abs(M0-M[0,:]))>1e-8: + raise Exception('>>> Inaccuracies in integrateMomentTS') + if np.max(np.abs(Mm1-M[-1,:]))>1e-8: + raise Exception('>>> Inaccuracies in integrateMomentTS') + + return M + if __name__ == '__main__': main() diff --git a/pydatview/fast/runner.py b/pydatview/fast/runner.py index 3a1cb57..d707088 100644 --- a/pydatview/fast/runner.py +++ b/pydatview/fast/runner.py @@ -24,7 +24,7 @@ # --- Tools for executing FAST # --------------------------------------------------------------------------------{ # --- START cmd.py -def run_cmds(inputfiles, exe, parallel=True, showOutputs=True, nCores=None, showCommand=True): +def run_cmds(inputfiles, exe, parallel=True, showOutputs=True, nCores=None, showCommand=True, verbose=True): """ Run a set of simple commands of the form `exe input_file` By default, the commands are run in "parallel" (though the method needs to be improved) The stdout and stderr may be displayed on screen (`showOutputs`) or hidden. @@ -33,13 +33,15 @@ def run_cmds(inputfiles, exe, parallel=True, showOutputs=True, nCores=None, show Failed=[] def _report(p): if p.returncode==0: - print('[ OK ] Input : ',p.input_file) + if verbose: + print('[ OK ] Input : ',p.input_file) else: Failed.append(p) - print('[FAIL] Input : ',p.input_file) - print(' Directory: '+os.getcwd()) - print(' Command : '+p.cmd) - print(' Use `showOutputs=True` to debug, or run the command above.') + if verbose: + print('[FAIL] Input : ',p.input_file) + print(' Directory: '+os.getcwd()) + print(' Command : '+p.cmd) + print(' Use `showOutputs=True` to debug, or run the command above.') #out, err = p.communicate() #print('StdOut:\n'+out) #print('StdErr:\n'+err) @@ -70,13 +72,14 @@ def _report(p): _report(p) # --- Giving a summary if len(Failed)==0: - print('[ OK ] All simulations run successfully.') - return True + if verbose: + print('[ OK ] All simulations run successfully.') + return True, Failed else: print('[FAIL] {}/{} simulations failed:'.format(len(Failed),len(inputfiles))) for p in Failed: print(' ',p.input_file) - return False + return False, Failed def run_cmd(input_file_or_arglist, exe, wait=True, showOutputs=False, showCommand=True): """ Run a simple command of the form `exe input_file` or `exe arg1 arg2` """ @@ -118,7 +121,7 @@ class Dummy(): return p # --- END cmd.py -def run_fastfiles(fastfiles, fastExe=None, parallel=True, showOutputs=True, nCores=None, showCommand=True, reRun=True): +def run_fastfiles(fastfiles, fastExe=None, parallel=True, showOutputs=True, nCores=None, showCommand=True, reRun=True, verbose=True): if fastExe is None: fastExe=FAST_EXE if not reRun: @@ -133,7 +136,7 @@ def run_fastfiles(fastfiles, fastExe=None, parallel=True, showOutputs=True, nCor newfiles.append(f) fastfiles=newfiles - return run_cmds(fastfiles, fastExe, parallel=parallel, 
showOutputs=showOutputs, nCores=nCores, showCommand=showCommand)
+    return run_cmds(fastfiles, fastExe, parallel=parallel, showOutputs=showOutputs, nCores=nCores, showCommand=showCommand, verbose=verbose)
 
 def run_fast(input_file, fastExe=None, wait=True, showOutputs=False, showCommand=True):
     if fastExe is None:
@@ -141,7 +144,7 @@ def run_fast(input_file, fastExe=None, wait=True, showOutputs=False, showCommand
         return run_cmd(input_file, fastExe, wait=wait, showOutputs=showOutputs, showCommand=showCommand)
 
-def writeBatch(batchfile, fastfiles, fastExe=None, nBatches=1):
+def writeBatch(batchfile, fastfiles, fastExe=None, nBatches=1, pause=False):
     """ Write batch file, everything is written relative to the batch file"""
     if fastExe is None:
         fastExe=FAST_EXE
@@ -156,6 +159,9 @@ def writeb(batchfile, fastfiles):
             ff_rel = os.path.relpath(ff_abs, batchdir)
             l = fastExe_rel + ' '+ ff_rel
             f.write("%s\n" % l)
+        if pause:
+            f.write("pause\n") # windows only..
+
     if nBatches==1:
         writeb(batchfile, fastfiles)
     else:
diff --git a/pydatview/tools/stats.py b/pydatview/tools/stats.py
index 90dd910..aaa66dc 100644
--- a/pydatview/tools/stats.py
+++ b/pydatview/tools/stats.py
@@ -53,16 +53,22 @@ def rsquare(y,f, c = True):
     rmse = np.sqrt(np.mean((y - f) ** 2))
     return r2,rmse
 
-def mean_rel_err(t1, y1, t2, y2, method='mean'):
+def mean_rel_err(t1=None, y1=None, t2=None, y2=None, method='mean', verbose=False):
     """
+    return mean relative error in %
+
     Methods:
        'mean'   : 100 * |y1-y2|/mean(y1)
        'meanabs': 100 * |y1-y2|/mean(|y1|)
-       'minmax': y1 and y2 scaled between 0.001 and 1
+       'minmax': y1 and y2 scaled between 0.5 and 1.5
                  |y1s-y2s|/|y1|
+       '1-2': signals are scaled between 1 & 2
     """
-    if len(y1)!=len(y2):
-        y2=np.interp(t1,t2,y2)
+    if t1 is None and t2 is None:
+        pass
+    else:
+        if len(y1)!=len(y2):
+            y2=np.interp(t1,t2,y2)
     # Method 1 relative to mean
     if method=='mean':
         ref_val = np.mean(y1)
@@ -74,10 +80,21 @@ def mean_rel_err(t1, y1, t2, y2, method='mean'):
         # Method 2 scaling signals
         Min=min(np.min(y1), np.min(y2))
         Max=max(np.max(y1), np.max(y2))
-        y1=(y1-Min)/(Max-Min)+0.001
-        y2=(y2-Min)/(Max-Min)+0.001
+        y1=(y1-Min)/(Max-Min)+0.5
+        y2=(y2-Min)/(Max-Min)+0.5
         meanrelerr = np.mean(np.abs(y1-y2)/np.abs(y1))*100
-    #print('Mean rel error {:7.2f} %'.format( meanrelerr))
+    elif method=='1-2':
+        # transform values from 1 to 2
+        Min=min(np.min(y1), np.min(y2))
+        Max=max(np.max(y1), np.max(y2))
+        y1 = (y1-Min)/(Max-Min)+1
+        y2 = (y2-Min)/(Max-Min)+1
+        meanrelerr = np.mean(np.abs(y1-y2)/np.abs(y1))*100
+    else:
+        raise Exception('Unknown method',method)
+
+    if verbose:
+        print('Mean rel error {:7.2f} %'.format( meanrelerr))
     return meanrelerr
 
diff --git a/weio b/weio
index 4dc177c..da1cc1a 160000
--- a/weio
+++ b/weio
@@ -1 +1 @@
-Subproject commit 4dc177c24eda16e50df831dd8f5cd0dcfddfe274
+Subproject commit da1cc1aa58eaea50844d0b9d4ccfc5d629706d90

From 8f5f145d38ce7cd0e63d6aff7ee7bf91a177184d Mon Sep 17 00:00:00 2001
From: Emmanuel Branlard
Date: Thu, 10 Feb 2022 21:45:05 -0700
Subject: [PATCH 23/36] Reading user file formats from weio UserData directory

---
 README.md         |  1 -
 pydatview/main.py | 27 +++++++++++++++++++--------
 weio              |  2 +-
 3 files changed, 20 insertions(+), 10 deletions(-)

diff --git a/README.md b/README.md
index 2f14492..637ab40 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,4 @@
 [![Build status](https://github.com/ebranlard/pyDatView/workflows/Tests/badge.svg)](https://github.com/ebranlard/pyDatView/actions?query=workflow%3A%22Tests%22)
-[![Build 
Status](https://travis-ci.com/ebranlard/pyDatView.svg?branch=master)](https://travis-ci.com/ebranlard/pyDatView) Donate just a small amount, buy me a coffee! diff --git a/pydatview/main.py b/pydatview/main.py index 2f60547..783da36 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -47,7 +47,6 @@ PROG_VERSION='v0.2-local' try: import weio # File Formats and File Readers - FILE_FORMATS= weio.fileFormats() except: print('') print('Error: the python package `weio` was not imported successfully.\n') @@ -57,9 +56,7 @@ print('Alternatively re-clone this repository into a separate folder:\n') print(' git clone --recurse-submodules https://github.com/ebranlard/pyDatView\n') sys.exit(-1) -FILE_FORMATS_EXTENSIONS = [['.*']]+[f.extensions for f in FILE_FORMATS] -FILE_FORMATS_NAMES = ['auto (any supported file)'] + [f.name for f in FILE_FORMATS] -FILE_FORMATS_NAMEXT =['{} ({})'.format(n,','.join(e)) for n,e in zip(FILE_FORMATS_NAMES,FILE_FORMATS_EXTENSIONS)] + SIDE_COL = [160,160,300,420,530] SIDE_COL_LARGE = [200,200,360,480,600] @@ -93,7 +90,7 @@ def OnDropFiles(self, x, y, filenames): if iFormat==0: # auto-format Format = None else: - Format = FILE_FORMATS[iFormat-1] + Format = parent.FILE_FORMATS[iFormat-1] self.parent.load_files(filenames, fileformats=[Format]*len(filenames), bAdd=bAdd) return True @@ -159,10 +156,20 @@ def __init__(self, filename=None): self.SetMenuBar(menuBar) self.Bind(wx.EVT_MENU,self.onAbout,aboutMenuItem) + + self.FILE_FORMATS, errors= weio.fileFormats(ignoreErrors=True, verbose=False) + if len(errors)>0: + for e in errors: + Warn(self, e) + + self.FILE_FORMATS_EXTENSIONS = [['.*']]+[f.extensions for f in self.FILE_FORMATS] + self.FILE_FORMATS_NAMES = ['auto (any supported file)'] + [f.name for f in self.FILE_FORMATS] + self.FILE_FORMATS_NAMEXT =['{} ({})'.format(n,','.join(e)) for n,e in zip(self.FILE_FORMATS_NAMES,self.FILE_FORMATS_EXTENSIONS)] + # --- ToolBar tb = self.CreateToolBar(wx.TB_HORIZONTAL|wx.TB_TEXT|wx.TB_HORZ_LAYOUT) self.toolBar = tb - self.comboFormats = wx.ComboBox(tb, choices = FILE_FORMATS_NAMEXT, style=wx.CB_READONLY) + self.comboFormats = wx.ComboBox(tb, choices = self.FILE_FORMATS_NAMEXT, style=wx.CB_READONLY) self.comboFormats.SetSelection(0) self.comboMode = wx.ComboBox(tb, choices = SEL_MODES, style=wx.CB_READONLY) self.comboMode.SetSelection(0) @@ -517,7 +524,11 @@ def onSave(self, event=None): self.plotPanel.navTB.save_figure() def onAbout(self, event=None): - Info(self,PROG_NAME+' '+PROG_VERSION+'\n\nVisit http://github.com/ebranlard/pyDatView for documentation.') + defaultDir = weio.defaultUserDataDir() # TODO input file options + Info(self,PROG_NAME+' '+PROG_VERSION+'\n\n' + 'pyDatView data directory:\n {}\n'.format(os.path.join(defaultDir,'pyDatView'))+ + 'weio data directory: \n {}\n'.format(os.path.join(defaultDir,'weio'))+ + '\n\nVisit http://github.com/ebranlard/pyDatView for documentation.') def onReload(self, event=None): filenames, fileformats = self.tabList.filenames_and_formats @@ -562,7 +573,7 @@ def selectFile(self,bAdd=False): if iFormat==0: # auto-format Format = None #wildcard = 'all (*.*)|*.*' - wildcard='|'.join([n+'|*'+';*'.join(e) for n,e in zip(FILE_FORMATS_NAMEXT,FILE_FORMATS_EXTENSIONS)]) + wildcard='|'.join([n+'|*'+';*'.join(e) for n,e in zip(self.FILE_FORMATS_NAMEXT,self.FILE_FORMATS_EXTENSIONS)]) #wildcard = sFormat + extensions+'|all (*.*)|*.*' else: Format = FILE_FORMATS[iFormat-1] diff --git a/weio b/weio index da1cc1a..9d0dede 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit 
da1cc1aa58eaea50844d0b9d4ccfc5d629706d90 +Subproject commit 9d0deded1260b482306155040b8ba2c748813d53 From d1b21a1239cf82f1d9d90aad600e915ae85450e5 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Thu, 10 Feb 2022 21:46:36 -0700 Subject: [PATCH 24/36] Update of weio --- weio | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/weio b/weio index 9d0dede..9243842 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit 9d0deded1260b482306155040b8ba2c748813d53 +Subproject commit 9243842cf41c2a3437c590c58e633c7019381b23 From 310b3d8dc18c8a3e4fd8dc269f754c0471fa6110 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Fri, 11 Feb 2022 23:08:48 -0700 Subject: [PATCH 25/36] Save GUI options using a config file (Closes #94, Closes #92) --- pydatview/GUICommon.py | 77 ++++++------ pydatview/GUIInfoPanel.py | 257 ++++++++++++++++++++------------------ pydatview/GUIPlotPanel.py | 62 +++++++-- pydatview/appdata.py | 147 ++++++++++++++++++++++ pydatview/common.py | 28 ----- pydatview/main.py | 74 +++++++---- 6 files changed, 423 insertions(+), 222 deletions(-) create mode 100644 pydatview/appdata.py diff --git a/pydatview/GUICommon.py b/pydatview/GUICommon.py index 2ab907f..59470be 100644 --- a/pydatview/GUICommon.py +++ b/pydatview/GUICommon.py @@ -3,55 +3,58 @@ import os import platform +_MONOFONTSIZE=9 +_FONTSIZE=9 + # --------------------------------------------------------------------------------} -# --- +# --- FONT # --------------------------------------------------------------------------------{ -def getMonoFontAbs(): - #return wx.Font(9, wx.MODERN, wx.NORMAL, wx.NORMAL, False, u'Monospace') - if os.name=='nt': - font=wx.Font(9, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) - elif os.name=='posix': - font=wx.Font(10, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) - else: - font=wx.Font(8, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) +def setMonoFontSize(fs): + global _MONOFONTSIZE + _MONOFONTSIZE=int(fs) + +def getMonoFontSize(): + global _MONOFONTSIZE + return _MONOFONTSIZE + +def setFontSize(fs): + global _FONTSIZE + _FONTSIZE=int(fs) + +def getFontSize(): + global _FONTSIZE + return _FONTSIZE + + +def getFont(widget): + global _FONTSIZE + font = widget.GetFont() + #font.SetFamily(wx.TELETYPE) + font.SetPointSize(_FONTSIZE) + #font=wx.Font(_FONTSIZE-1, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) return font def getMonoFont(widget): + global _MONOFONTSIZE font = widget.GetFont() font.SetFamily(wx.TELETYPE) - if platform.system()=='Windows': - pass - elif platform.system()=='Linux': - pass - elif platform.system()=='Darwin': - font.SetPointSize(font.GetPointSize()-1) - else: - pass + font.SetPointSize(_MONOFONTSIZE) return font - - -# def getColumn(df,i): -# if i == wx.NOT_FOUND or i == 0: -# x = np.array(range(df.shape[0])) -# c = None -# isString = False -# isDate = False -# else: -# c = df.iloc[:, i-1] -# x = df.iloc[:, i-1].values -# isString = c.dtype == np.object and isinstance(c.values[0], str) -# if isString: -# x=x.astype(str) -# isDate = np.issubdtype(c.dtype, np.datetime64) -# if isDate: -# x=x.astype('datetime64[s]') -# -# return x,isString,isDate,c -# # --------------------------------------------------------------------------------} # --- Helper functions # --------------------------------------------------------------------------------{ +def About(parent, message): + class MessageBox(wx.Dialog): + def __init__(self, parent, title, message): + wx.Dialog.__init__(self, parent, title=title, style=wx.CAPTION|wx.CLOSE_BOX) + text = wx.TextCtrl(self, 
style=wx.TE_READONLY|wx.BORDER_NONE|wx.TE_MULTILINE|wx.TE_AUTO_URL) + text.SetValue(message) + text.SetBackgroundColour(wx.SystemSettings.GetColour(4)) + self.ShowModal() + self.Destroy() + MessageBox(parent, 'About', message) + def YesNo(parent, question, caption = 'Yes or no?'): dlg = wx.MessageDialog(parent, question, caption, wx.YES_NO | wx.ICON_QUESTION) result = dlg.ShowModal() == wx.ID_YES diff --git a/pydatview/GUIInfoPanel.py b/pydatview/GUIInfoPanel.py index 5d80862..e198a78 100644 --- a/pydatview/GUIInfoPanel.py +++ b/pydatview/GUIInfoPanel.py @@ -59,11 +59,16 @@ def onSelChange(self,event): self.parent._showStats(erase=True) +def selectColumns(columnList, selectedNames): + """ set Columns as selected based on a list of column names """ + for col in columnList: + col['s']=col['name'] in selectedNames + class InfoPanel(wx.Panel): """ Display the list of the columns for the user to select """ #---------------------------------------------------------------------- - def __init__(self, parent): + def __init__(self, parent, data=None): wx.Panel.__init__(self, parent, -1) # --- # List of dictionaries for available "statistical" signals. Dictionary keys: @@ -71,130 +76,144 @@ def __init__(self, parent): # al : alignement (L,R,C for left,right or center) # f : function used to evaluate value # s : selected or not + if data is None: + print('>>> Using default settings for info panel') + from .appdata import defaultInfoPanelData + data = defaultInfoPanelData() + + + # TODO TODO Consider using an OrderedDict instead, with 'name' as key, and maybe use function instead of string self.ColsReg=[] - self.ColsReg.append({'name':'Directory' , 'al':'L' , 'm':'baseDir' , 's':False}) - self.ColsReg.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's':False}) - self.ColsReg.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's':False}) - self.ColsReg.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's':True}) - self.ColsReg.append({'name':'Median' , 'al':'R' , 'm':'yMedian' , 's' :False}) - self.ColsReg.append({'name':'Mean' , 'al':'R' , 'm':'y0Mean' , 's' :True}) - self.ColsReg.append({'name':'Std' , 'al':'R' , 'm':'y0Std' , 's' :True}) - self.ColsReg.append({'name':'Var' , 'al':'R' , 'm':'y0Var' , 's' :False}) - self.ColsReg.append({'name':'Std/Mean (TI)', 'al':'R' , 'm':'y0TI' , 's' :False}) - self.ColsReg.append({'name':'Min' , 'al':'R' , 'm':'y0Min' , 's' :True}) - self.ColsReg.append({'name':'Max' , 'al':'R' , 'm':'y0Max' , 's' :True}) - self.ColsReg.append({'name':'x@Min' , 'al':'R' , 'm':'xAtYMin' , 's' :False}) - self.ColsReg.append({'name':'x@Max' , 'al':'R' , 'm':'xAtYMax' , 's' :False}) - self.ColsReg.append({'name':'Abs. 
Max' , 'al':'R' , 'm':'yAbsMax', 's' :False}) - self.ColsReg.append({'name':'Range' , 'al':'R' , 'm':'yRange', 's' :True}) - self.ColsReg.append({'name':'dx' , 'al':'R' , 'm':'dx' , 's' :True}) - self.ColsReg.append({'name':'Meas 1' , 'al':'R' , 'm':'meas1' , 's' :False}) - self.ColsReg.append({'name':'Meas 2' , 'al':'R' , 'm':'meas2' , 's' :False}) - self.ColsReg.append({'name':'Mean (Meas)' , 'al':'R' , 'm':'yMeanMeas' , 's' :False}) - self.ColsReg.append({'name':'Min (Meas)' , 'al':'R' , 'm':'yMinMeas' , 's' :False}) - self.ColsReg.append({'name':'Max (Meas)' , 'al':'R' , 'm':'yMaxMeas' , 's' :False}) - self.ColsReg.append({'name':'x@Min (Meas)' , 'al':'R' , 'm':'xAtYMinMeas' , 's' :False}) - self.ColsReg.append({'name':'x@Max (Meas)' , 'al':'R' , 'm':'xAtYMaxMeas' , 's' :False}) - self.ColsReg.append({'name':'xMin' , 'al':'R' , 'm':'xMin' , 's' :False}) - self.ColsReg.append({'name':'xMax' , 'al':'R' , 'm':'xMax' , 's' :False}) - self.ColsReg.append({'name':'xRange' , 'al':'R' , 'm':'xRange', 's' :False}) - self.ColsReg.append({'name':u'\u222By' , 'al':'R' , 'm':'inty' , 's' :False}) - self.ColsReg.append({'name':u'\u222By/\u222Bdx', 'al':'R' , 'm':'intyintdx' , 's' :False}) - self.ColsReg.append({'name':u'\u222By.x ' , 'al':'R' , 'm':'intyx1' , 's' :False}) - self.ColsReg.append({'name':u'\u222By.x/\u222By','al':'R' , 'm':'intyx1_scaled' , 's' :False}) - self.ColsReg.append({'name':u'\u222By.x^2' , 'al':'R' , 'm':'intyx2' , 's' :False}) - self.ColsReg.append({'name':'L_eq(m=3)' , 'al':'R' , 'f':lambda x:x.leq(m=3) , 's' :False}) - self.ColsReg.append({'name':'L_eq(m=4)' , 'al':'R' , 'f':lambda x:x.leq(m=4) , 's' :False}) - self.ColsReg.append({'name':'L_eq(m=5)' , 'al':'R' , 'f':lambda x:x.leq(m=5) , 's' :False}) - self.ColsReg.append({'name':'L_eq(m=7)' , 'al':'R' , 'f':lambda x:x.leq(m=7) , 's' :False}) - self.ColsReg.append({'name':'L_eq(m=8)' , 'al':'R' , 'f':lambda x:x.leq(m=8) , 's' :False}) - self.ColsReg.append({'name':'L_eq(m=9)' , 'al':'R' , 'f':lambda x:x.leq(m=9) , 's' :False}) - self.ColsReg.append({'name':'L_eq(m=10)' , 'al':'R' , 'f':lambda x:x.leq(m=10), 's' :False}) - self.ColsReg.append({'name':'L_eq(m=12)' , 'al':'R' , 'f':lambda x:x.leq(m=12), 's' :False}) - self.ColsReg.append({'name':'n' , 'al':'R' , 'm':'ylen' , 's' :True}) + self.ColsReg.append({'name':'Directory' , 'al':'L' , 'm':'baseDir' , 's' :False}) + self.ColsReg.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's' :False}) + self.ColsReg.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's' :False}) + self.ColsReg.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's' :True}) + self.ColsReg.append({'name':'Median' , 'al':'R' , 'm':'yMedian' , 's' :False}) + self.ColsReg.append({'name':'Mean' , 'al':'R' , 'm':'y0Mean' , 's' :True}) + self.ColsReg.append({'name':'Std' , 'al':'R' , 'm':'y0Std' , 's' :True}) + self.ColsReg.append({'name':'Var' , 'al':'R' , 'm':'y0Var' , 's' :False}) + self.ColsReg.append({'name':'Std/Mean (TI)', 'al':'R' , 'm':'y0TI' , 's' :False}) + self.ColsReg.append({'name':'Min' , 'al':'R' , 'm':'y0Min' , 's' :True}) + self.ColsReg.append({'name':'Max' , 'al':'R' , 'm':'y0Max' , 's' :True}) + self.ColsReg.append({'name':'x@Min' , 'al':'R' , 'm':'xAtYMin' , 's' :False}) + self.ColsReg.append({'name':'x@Max' , 'al':'R' , 'm':'xAtYMax' , 's' :False}) + self.ColsReg.append({'name':'Abs. 
Max' , 'al':'R' , 'm':'yAbsMax', 's' :False}) + self.ColsReg.append({'name':'Range' , 'al':'R' , 'm':'yRange', 's' :True}) + self.ColsReg.append({'name':'dx' , 'al':'R' , 'm':'dx' , 's' :True}) + self.ColsReg.append({'name':'Meas 1' , 'al':'R' , 'm':'meas1' , 's' :False}) + self.ColsReg.append({'name':'Meas 2' , 'al':'R' , 'm':'meas2' , 's' :False}) + self.ColsReg.append({'name':'Mean (Meas)' , 'al':'R' , 'm':'yMeanMeas' , 's' :False}) + self.ColsReg.append({'name':'Min (Meas)' , 'al':'R' , 'm':'yMinMeas' , 's' :False}) + self.ColsReg.append({'name':'Max (Meas)' , 'al':'R' , 'm':'yMaxMeas' , 's' :False}) + self.ColsReg.append({'name':'x@Min (Meas)' , 'al':'R' , 'm':'xAtYMinMeas' , 's' :False}) + self.ColsReg.append({'name':'x@Max (Meas)' , 'al':'R' , 'm':'xAtYMaxMeas' , 's' :False}) + self.ColsReg.append({'name':'xMin' , 'al':'R' , 'm':'xMin' , 's' :False}) + self.ColsReg.append({'name':'xMax' , 'al':'R' , 'm':'xMax' , 's' :False}) + self.ColsReg.append({'name':'xRange' , 'al':'R' , 'm':'xRange', 's' :False}) + self.ColsReg.append({'name':u'\u222By' , 'al':'R' , 'm':'inty' , 's' :False}) + self.ColsReg.append({'name':u'\u222By/\u222Bdx', 'al':'R' , 'm':'intyintdx' , 's' :False}) + self.ColsReg.append({'name':u'\u222By.x ' , 'al':'R' , 'm':'intyx1' , 's' :False}) + self.ColsReg.append({'name':u'\u222By.x/\u222By','al':'R' , 'm':'intyx1_scaled' , 's' :False}) + self.ColsReg.append({'name':u'\u222By.x^2' , 'al':'R' , 'm':'intyx2' , 's' :False}) + self.ColsReg.append({'name':'L_eq(m=3)' , 'al':'R' , 'f':lambda x:x.leq(m=3) , 's' :False}) + self.ColsReg.append({'name':'L_eq(m=4)' , 'al':'R' , 'f':lambda x:x.leq(m=4) , 's' :False}) + self.ColsReg.append({'name':'L_eq(m=5)' , 'al':'R' , 'f':lambda x:x.leq(m=5) , 's' :False}) + self.ColsReg.append({'name':'L_eq(m=7)' , 'al':'R' , 'f':lambda x:x.leq(m=7) , 's' :False}) + self.ColsReg.append({'name':'L_eq(m=8)' , 'al':'R' , 'f':lambda x:x.leq(m=8) , 's' :False}) + self.ColsReg.append({'name':'L_eq(m=9)' , 'al':'R' , 'f':lambda x:x.leq(m=9) , 's' :False}) + self.ColsReg.append({'name':'L_eq(m=10)' , 'al':'R' , 'f':lambda x:x.leq(m=10), 's' :False}) + self.ColsReg.append({'name':'L_eq(m=12)' , 'al':'R' , 'f':lambda x:x.leq(m=12), 's' :False}) + self.ColsReg.append({'name':'n' , 'al':'R' , 'm':'ylen' , 's' :True}) self.ColsFFT=[] - self.ColsFFT.append({'name':'Directory' , 'al':'L' , 'm':'baseDir' , 's':False}) - self.ColsFFT.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's':False}) - self.ColsFFT.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's':False}) - self.ColsFFT.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's':True}) - self.ColsFFT.append({'name':'Mean' , 'al':'R' , 'm':'y0Mean' , 's' :True}) - self.ColsFFT.append({'name':'Std' , 'al':'R' , 'm':'y0Std' , 's' :True}) - self.ColsFFT.append({'name':'Min' , 'al':'R' , 'm':'y0Min' , 's' :True}) - self.ColsFFT.append({'name':'Max' , 'al':'R' , 'm':'y0Max' , 's' :True}) - self.ColsFFT.append({'name':'x@Min' , 'al':'R' , 'm':'xAtYMin' , 's' :False}) - self.ColsFFT.append({'name':'x@Max' , 'al':'R' , 'm':'xAtYMax' , 's' :False}) - self.ColsFFT.append({'name':'Mean(FFT)' , 'al':'R' , 'm':'yMean' , 's' :False}) - self.ColsFFT.append({'name':'Std(FFT)' , 'al':'R' , 'm':'yStd' , 's' :False}) - self.ColsFFT.append({'name':'Min(FFT)' , 'al':'R' , 'm':'yMin' , 's' :True}) - self.ColsFFT.append({'name':'Max(FFT)' , 'al':'R' , 'm':'yMax' , 's' :True}) - self.ColsFFT.append({'name':'Var' , 'al':'R' , 'm':'y0Var' , 's' :False}) - self.ColsFFT.append({'name':u'\u222By(FFT)' , 'al':'R' , 
'm':'inty' , 's' :True}) - self.ColsFFT.append({'name':'dx(FFT)' , 'al':'R' , 'm':'dx' , 's' :True}) - self.ColsFFT.append({'name':'xMax(FFT)' , 'al':'R' , 'm':'xMax' , 's' :True}) - self.ColsFFT.append({'name':'nOvlp(FFT)' , 'al':'R' , 'f':lambda x:x.Info('LOvlp') , 's' :False}) - self.ColsFFT.append({'name':'nSeg(FFT)' , 'al':'R' , 'f':lambda x:x.Info('LSeg') , 's' :False}) - self.ColsFFT.append({'name':'nWin(FFT)' , 'al':'R' , 'f':lambda x:x.Info('LWin') , 's' :False}) - self.ColsFFT.append({'name':'nFFT(FFT)' , 'al':'R' , 'f':lambda x:x.Info('nFFT') , 's' :False}) - self.ColsFFT.append({'name':'n(FFT)' , 'al':'R' , 'm':'ylen' , 's' :True}) - self.ColsFFT.append({'name':'Meas 1' , 'al':'R' , 'm':'meas1' , 's' :False}) - self.ColsFFT.append({'name':'Meas 2' , 'al':'R' , 'm':'meas2' , 's' :False}) - self.ColsFFT.append({'name':'Mean (Meas)' , 'al':'R' , 'm':'yMeanMeas' , 's' :False}) - self.ColsFFT.append({'name':'Min (Meas)' , 'al':'R' , 'm':'yMinMeas' , 's' :False}) - self.ColsFFT.append({'name':'Max (Meas)' , 'al':'R' , 'm':'yMaxMeas' , 's' :False}) - self.ColsFFT.append({'name':'x@Min (Meas)' , 'al':'R' , 'm':'xAtYMinMeas' , 's' :False}) - self.ColsFFT.append({'name':'x@Max (Meas)' , 'al':'R' , 'm':'xAtYMaxMeas' , 's' :False}) - self.ColsFFT.append({'name':'n ' , 'al':'R' , 'm':'n0' , 's' :True}) + self.ColsFFT.append({'name':'Directory' , 'al':'L' , 'm':'baseDir' , 's' :False}) + self.ColsFFT.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's' :False}) + self.ColsFFT.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's' :False}) + self.ColsFFT.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's' :True}) + self.ColsFFT.append({'name':'Mean' , 'al':'R' , 'm':'y0Mean' , 's' :True}) + self.ColsFFT.append({'name':'Std' , 'al':'R' , 'm':'y0Std' , 's' :True}) + self.ColsFFT.append({'name':'Min' , 'al':'R' , 'm':'y0Min' , 's' :True}) + self.ColsFFT.append({'name':'Max' , 'al':'R' , 'm':'y0Max' , 's' :True}) + self.ColsFFT.append({'name':'x@Min' , 'al':'R' , 'm':'xAtYMin' , 's' :False}) + self.ColsFFT.append({'name':'x@Max' , 'al':'R' , 'm':'xAtYMax' , 's' :False}) + self.ColsFFT.append({'name':'Mean(FFT)' , 'al':'R' , 'm':'yMean' , 's' :False}) + self.ColsFFT.append({'name':'Std(FFT)' , 'al':'R' , 'm':'yStd' , 's' :False}) + self.ColsFFT.append({'name':'Min(FFT)' , 'al':'R' , 'm':'yMin' , 's' :True}) + self.ColsFFT.append({'name':'Max(FFT)' , 'al':'R' , 'm':'yMax' , 's' :True}) + self.ColsFFT.append({'name':'Var' , 'al':'R' , 'm':'y0Var' , 's' :False}) + self.ColsFFT.append({'name':u'\u222By(FFT)' , 'al':'R' , 'm':'inty' , 's' :True}) + self.ColsFFT.append({'name':'dx(FFT)' , 'al':'R' , 'm':'dx' , 's' :True}) + self.ColsFFT.append({'name':'xMax(FFT)' , 'al':'R' , 'm':'xMax' , 's' :True}) + self.ColsFFT.append({'name':'nOvlp(FFT)' , 'al':'R' , 'f':lambda x:x.Info('LOvlp') , 's' :False}) + self.ColsFFT.append({'name':'nSeg(FFT)' , 'al':'R' , 'f':lambda x:x.Info('LSeg') , 's' :False}) + self.ColsFFT.append({'name':'nWin(FFT)' , 'al':'R' , 'f':lambda x:x.Info('LWin') , 's' :False}) + self.ColsFFT.append({'name':'nFFT(FFT)' , 'al':'R' , 'f':lambda x:x.Info('nFFT') , 's' :False}) + self.ColsFFT.append({'name':'n(FFT)' , 'al':'R' , 'm':'ylen' , 's' :True}) + self.ColsFFT.append({'name':'Meas 1' , 'al':'R' , 'm':'meas1' , 's' :False}) + self.ColsFFT.append({'name':'Meas 2' , 'al':'R' , 'm':'meas2' , 's' :False}) + self.ColsFFT.append({'name':'Mean (Meas)' , 'al':'R' , 'm':'yMeanMeas' , 's' :False}) + self.ColsFFT.append({'name':'Min (Meas)' , 'al':'R' , 'm':'yMinMeas' , 's' :False}) + 
self.ColsFFT.append({'name':'Max (Meas)' , 'al':'R' , 'm':'yMaxMeas' , 's' :False}) + self.ColsFFT.append({'name':'x@Min (Meas)' , 'al':'R' , 'm':'xAtYMinMeas' , 's' :False}) + self.ColsFFT.append({'name':'x@Max (Meas)' , 'al':'R' , 'm':'xAtYMaxMeas' , 's' :False}) + self.ColsFFT.append({'name':'n' , 'al':'R' , 'm':'n0' , 's' :True}) self.ColsMinMax=[] - self.ColsMinMax.append({'name':'Directory' , 'al':'L' , 'm':'baseDir', 's':False}) - self.ColsMinMax.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's':False}) - self.ColsMinMax.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's':False}) - self.ColsMinMax.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's':True}) - self.ColsMinMax.append({'name':'Mean' , 'al':'R' , 'm':'y0Mean' , 's' :True}) - self.ColsMinMax.append({'name':'Std' , 'al':'R' , 'm':'y0Std' , 's' :True}) - self.ColsMinMax.append({'name':'Min' , 'al':'R' , 'm':'y0Min' , 's' :True}) - self.ColsMinMax.append({'name':'Max' , 'al':'R' , 'm':'y0Max' , 's' :True}) - self.ColsMinMax.append({'name':'Mean(MinMax)' , 'al':'R' , 'm':'yMean' , 's' :True}) - self.ColsMinMax.append({'name':'Std(MinMax)' , 'al':'R' , 'm':'yStd' , 's' :True}) - self.ColsMinMax.append({'name':u'\u222By(MinMax)' , 'al':'R' , 'm':'inty' , 's' :True}) - self.ColsMinMax.append({'name':u'\u222By.x(MinMax) ' , 'al':'R' , 'm':'intyx1' , 's' :False}) + self.ColsMinMax.append({'name':'Directory' , 'al':'L' , 'm':'baseDir', 's' :False}) + self.ColsMinMax.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's' :False}) + self.ColsMinMax.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's' :False}) + self.ColsMinMax.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's' :True}) + self.ColsMinMax.append({'name':'Mean' , 'al':'R' , 'm':'y0Mean' , 's' :True}) + self.ColsMinMax.append({'name':'Std' , 'al':'R' , 'm':'y0Std' , 's' :True}) + self.ColsMinMax.append({'name':'Min' , 'al':'R' , 'm':'y0Min' , 's' :True}) + self.ColsMinMax.append({'name':'Max' , 'al':'R' , 'm':'y0Max' , 's' :True}) + self.ColsMinMax.append({'name':'Mean(MinMax)' , 'al':'R' , 'm':'yMean' , 's' :True}) + self.ColsMinMax.append({'name':'Std(MinMax)' , 'al':'R' , 'm':'yStd' , 's' :True}) + self.ColsMinMax.append({'name':u'\u222By(MinMax)' , 'al':'R' , 'm':'inty' , 's' :True}) + self.ColsMinMax.append({'name':u'\u222By.x(MinMax) ' , 'al':'R' , 'm':'intyx1' , 's' :False}) self.ColsMinMax.append({'name':u'\u222By.x/\u222By(MinMax)' , 'al':'R' , 'm':'intyx1_scaled' , 's' :False}) - self.ColsMinMax.append({'name':u'\u222By.x^2(MinMax)' , 'al':'R' , 'm':'intyx2' , 's' :False}) - self.ColsMinMax.append({'name':'dx(MinMax)' , 'al':'R' , 'm':'dx' , 's' :False}) - self.ColsMinMax.append({'name':'n' , 'al':'R' , 'm':'ylen' , 's' :True}) + self.ColsMinMax.append({'name':u'\u222By.x^2(MinMax)' , 'al':'R' , 'm':'intyx2' , 's' :False}) + self.ColsMinMax.append({'name':'dx(MinMax)' , 'al':'R' , 'm':'dx' , 's' :False}) + self.ColsMinMax.append({'name':'n' , 'al':'R' , 'm':'ylen' , 's' :True}) self.ColsPDF=[] - self.ColsPDF.append({'name':'Directory' , 'al':'L' , 'm':'baseDir' , 's':False}) - self.ColsPDF.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's':False}) - self.ColsPDF.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's':False}) - self.ColsPDF.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's':True}) - self.ColsPDF.append({'name':'Mean' , 'al':'R' , 'm':'y0Mean' , 's' :True}) - self.ColsPDF.append({'name':'Std' , 'al':'R' , 'm':'y0Std' , 's' :True}) - self.ColsPDF.append({'name':'Min' , 'al':'R' , 'm':'y0Min' , 's' :True}) - 
self.ColsPDF.append({'name':'Max' , 'al':'R' , 'm':'y0Max' , 's' :True}) - self.ColsPDF.append({'name':'x@Min' , 'al':'R' , 'm':'xAtYMin' , 's' :False}) - self.ColsPDF.append({'name':'x@Max' , 'al':'R' , 'm':'xAtYMax' , 's' :False}) - self.ColsPDF.append({'name':'Mean(PDF)' , 'al':'R' , 'm':'yMean' , 's' :False}) - self.ColsPDF.append({'name':'Std(PDF)' , 'al':'R' , 'm':'yStd' , 's' :False}) - self.ColsPDF.append({'name':'Min(PDF)' , 'al':'R' , 'm':'yMin' , 's' :True}) - self.ColsPDF.append({'name':'Max(PDF)' , 'al':'R' , 'm':'yMax' , 's' :True}) - self.ColsPDF.append({'name':u'\u222By(PDF)' , 'al':'R' , 'm':'inty' , 's' :True}) - self.ColsPDF.append({'name':'Meas 1' , 'al':'R' , 'm':'meas1' , 's' :False}) - self.ColsPDF.append({'name':'Meas 2' , 'al':'R' , 'm':'meas2' , 's' :False}) - self.ColsPDF.append({'name':'Mean (Meas)' , 'al':'R' , 'm':'yMeanMeas' , 's' :False}) - self.ColsPDF.append({'name':'Min (Meas)' , 'al':'R' , 'm':'yMinMeas' , 's' :False}) - self.ColsPDF.append({'name':'Max (Meas)' , 'al':'R' , 'm':'yMaxMeas' , 's' :False}) - self.ColsPDF.append({'name':'x@Min (Meas)' , 'al':'R' , 'm':'xAtYMinMeas' , 's' :False}) - self.ColsPDF.append({'name':'x@Max (Meas)' , 'al':'R' , 'm':'xAtYMaxMeas' , 's' :False}) - self.ColsPDF.append({'name':'n(PDF)' , 'al':'R' , 'm':'ylen' , 's' :True}) + self.ColsPDF.append({'name':'Directory' , 'al':'L' , 'm':'baseDir' , 's' :False}) + self.ColsPDF.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's' :False}) + self.ColsPDF.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's' :False}) + self.ColsPDF.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's' :True}) + self.ColsPDF.append({'name':'Mean' , 'al':'R' , 'm':'y0Mean' , 's' :True}) + self.ColsPDF.append({'name':'Std' , 'al':'R' , 'm':'y0Std' , 's' :True}) + self.ColsPDF.append({'name':'Min' , 'al':'R' , 'm':'y0Min' , 's' :True}) + self.ColsPDF.append({'name':'Max' , 'al':'R' , 'm':'y0Max' , 's' :True}) + self.ColsPDF.append({'name':'x@Min' , 'al':'R' , 'm':'xAtYMin' , 's' :False}) + self.ColsPDF.append({'name':'x@Max' , 'al':'R' , 'm':'xAtYMax' , 's' :False}) + self.ColsPDF.append({'name':'Mean(PDF)' , 'al':'R' , 'm':'yMean' , 's' :False}) + self.ColsPDF.append({'name':'Std(PDF)' , 'al':'R' , 'm':'yStd' , 's' :False}) + self.ColsPDF.append({'name':'Min(PDF)' , 'al':'R' , 'm':'yMin' , 's' :True}) + self.ColsPDF.append({'name':'Max(PDF)' , 'al':'R' , 'm':'yMax' , 's' :True}) + self.ColsPDF.append({'name':u'\u222By(PDF)' , 'al':'R' , 'm':'inty' , 's' :True}) + self.ColsPDF.append({'name':'Meas 1' , 'al':'R' , 'm':'meas1' , 's' :False}) + self.ColsPDF.append({'name':'Meas 2' , 'al':'R' , 'm':'meas2' , 's' :False}) + self.ColsPDF.append({'name':'Mean (Meas)' , 'al':'R' , 'm':'yMeanMeas' , 's' :False}) + self.ColsPDF.append({'name':'Min (Meas)' , 'al':'R' , 'm':'yMinMeas' , 's' :False}) + self.ColsPDF.append({'name':'Max (Meas)' , 'al':'R' , 'm':'yMaxMeas' , 's' :False}) + self.ColsPDF.append({'name':'x@Min (Meas)' , 'al':'R' , 'm':'xAtYMinMeas' , 's' :False}) + self.ColsPDF.append({'name':'x@Max (Meas)' , 'al':'R' , 'm':'xAtYMaxMeas' , 's' :False}) + self.ColsPDF.append({'name':'n(PDF)' , 'al':'R' , 'm':'ylen' , 's' :True}) self.ColsCmp=[] - self.ColsCmp.append({'name':'Directory' , 'al':'L' , 'm':'baseDir' , 's':False}) - self.ColsCmp.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's':False}) - self.ColsCmp.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's':False}) - self.ColsCmp.append({'name':'Column' , 'al':'L' , 'm':'yName' ,'s':True}) - 
self.ColsCmp.append({'name':'Mean(Cmp)' , 'al':'R' , 'm':'yMean' , 's' :True}) - self.ColsCmp.append({'name':'Std(Cmp)' , 'al':'R' , 'm':'yStd' , 's' :True}) - self.ColsCmp.append({'name':'Min(Cmp)' , 'al':'R' , 'm':'yMin' , 's' :True}) - self.ColsCmp.append({'name':'Max(Cmp)' , 'al':'R' , 'm':'yMax' , 's' :True}) - self.ColsCmp.append({'name':'n(Cmp)' , 'al':'R' , 'm':'ylen' , 's' :True}) + self.ColsCmp.append({'name':'Directory' , 'al':'L' , 'm':'baseDir' , 's' :False}) + self.ColsCmp.append({'name':'Filename' , 'al':'L' , 'm':'fileName', 's' :False}) + self.ColsCmp.append({'name':'Table' , 'al':'L' , 'm':'tabName' , 's' :False}) + self.ColsCmp.append({'name':'Column' , 'al':'L' , 'm':'yName' , 's' :True}) + self.ColsCmp.append({'name':'Mean(Cmp)' , 'al':'R' , 'm':'yMean' , 's' :True}) + self.ColsCmp.append({'name':'Std(Cmp)' , 'al':'R' , 'm':'yStd' , 's' :True}) + self.ColsCmp.append({'name':'Min(Cmp)' , 'al':'R' , 'm':'yMin' , 's' :True}) + self.ColsCmp.append({'name':'Max(Cmp)' , 'al':'R' , 'm':'yMax' , 's' :True}) + self.ColsCmp.append({'name':'n(Cmp)' , 'al':'R' , 'm':'ylen' , 's' :True}) + + # Select columns based on data + selectColumns(self.ColsReg , data['ColumnsRegular']) + selectColumns(self.ColsFFT , data['ColumnsFFT']) + selectColumns(self.ColsMinMax, data['ColumnsMinMax']) + selectColumns(self.ColsPDF , data['ColumnsPDF']) + selectColumns(self.ColsCmp , data['ColumnsCmp']) self.menuReg=ColCheckMenu(self) self.menuReg.setColumns(self.ColsReg) diff --git a/pydatview/GUIPlotPanel.py b/pydatview/GUIPlotPanel.py index 75ae52d..e63a5c9 100644 --- a/pydatview/GUIPlotPanel.py +++ b/pydatview/GUIPlotPanel.py @@ -270,27 +270,56 @@ def clear_measures(self): self.parent.lbDeltaY.SetLabel('') class EstheticsPanel(wx.Panel): - def __init__(self, parent): + def __init__(self, parent, data): wx.Panel.__init__(self, parent) self.parent=parent #self.SetBackgroundColour('red') + # Font lbFont = wx.StaticText( self, -1, 'Font:') - self.cbFont = wx.ComboBox(self, choices=['6','7','8','9','10','11','12','13','14','15','16','17','18'] , style=wx.CB_READONLY) - self.cbFont.SetSelection(2) + fontChoices = ['6','7','8','9','10','11','12','13','14','15','16','17','18'] + self.cbFont = wx.ComboBox(self, choices=fontChoices , style=wx.CB_READONLY) + try: + i = fontChoices.index(str(data['Font'])) + except ValueError: + i = 2 + self.cbFont.SetSelection(i) + # Legend # NOTE: we don't offer "best" since best is slow lbLegend = wx.StaticText( self, -1, 'Legend:') - self.cbLegend = wx.ComboBox(self, choices=['None','Upper right','Upper left','Lower left','Lower right','Right','Center left','Center right','Lower center','Upper center','Center'] , style=wx.CB_READONLY) - self.cbLegend.SetSelection(1) + lbChoices = ['None','Upper right','Upper left','Lower left','Lower right','Right','Center left','Center right','Lower center','Upper center','Center'] + self.cbLegend = wx.ComboBox(self, choices=lbChoices, style=wx.CB_READONLY) + try: + i = lbChoices.index(str(data['LegendPosition'])) + except ValueError: + i=1 + self.cbLegend.SetSelection(i) + # Legend Font lbLgdFont = wx.StaticText( self, -1, 'Legend font:') - self.cbLgdFont = wx.ComboBox(self, choices=['6','7','8','9','10','11','12','13','14','15','16','17','18'] , style=wx.CB_READONLY) - self.cbLgdFont.SetSelection(2) + self.cbLgdFont = wx.ComboBox(self, choices=fontChoices, style=wx.CB_READONLY) + try: + i = fontChoices.index(str(data['LegendFont'])) + except ValueError: + i = 2 + self.cbLgdFont.SetSelection(i) + # Line Width Font lbLW = wx.StaticText( 
self, -1, 'Line width:') - self.cbLW = wx.ComboBox(self, choices=['0.5','1.0','1.5','2.0','2.5','3.0'] , style=wx.CB_READONLY) - self.cbLW.SetSelection(2) + LWChoices = ['0.5','1.0','1.25','1.5','2.0','2.5','3.0'] + self.cbLW = wx.ComboBox(self, choices=LWChoices , style=wx.CB_READONLY) + try: + i = LWChoices.index(str(data['LineWidth'])) + except ValueError: + i = 3 + self.cbLW.SetSelection(i) + # Marker Size lbMS = wx.StaticText( self, -1, 'Marker size:') - self.cbMS= wx.ComboBox(self, choices=['0.5','1','2','3','4','5','6','7','8'] , style=wx.CB_READONLY) - self.cbMS.SetSelection(2) + MSChoices = ['0.5','1','2','3','4','5','6','7','8'] + self.cbMS= wx.ComboBox(self, choices=MSChoices, style=wx.CB_READONLY) + try: + i = MSChoices.index(str(data['MarkerSize'])) + except ValueError: + i = 2 + self.cbMS.SetSelection(i) # Layout #dummy_sizer = wx.BoxSizer(wx.HORIZONTAL) @@ -353,6 +382,12 @@ def __init__(self, parent, selPanel,infoPanel=None, mainframe=None): self.mainframe= mainframe self.plotData = [] self.plotDataOptions=dict() + try: + self.data = mainframe.data['plotPanel'] + except: + print('>>> Using default settings for plot panel') + from .appdata import defaultPlotPanelData + self.data = defaultPlotPanelData() if self.selPanel is not None: bg=self.selPanel.BackgroundColour self.SetBackgroundColour(bg) # sowhow, our parent has a wrong color @@ -399,7 +434,7 @@ def __init__(self, parent, selPanel,infoPanel=None, mainframe=None): self.cmpPanel = CompCtrlPanel(self) self.mmxPanel = MinMaxPanel(self) # --- Esthetics panel - self.esthPanel = EstheticsPanel(self) + self.esthPanel = EstheticsPanel(self, data=self.data['plotStyle']) # --- Ctrl Panel @@ -420,8 +455,9 @@ def __init__(self, parent, selPanel,infoPanel=None, mainframe=None): self.cbMeasure = wx.CheckBox(self.ctrlPanel, -1, 'Measure',(10,10)) #self.cbSub.SetValue(True) # DEFAULT TO SUB? 
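# --- Editor's note (illustrative sketch, not part of the patch): the EstheticsPanel
# constructor above repeats the same "restore a ComboBox selection from a saved string,
# falling back to a default index" pattern for Font, Legend, Legend font, LineWidth and
# MarkerSize. A possible shared helper (hypothetical name, not an existing pyDatView function):
#
#   def restoreComboSelection(cb, choices, savedValue, iDefault):
#       """Select savedValue in the ComboBox if it is a known choice, else use iDefault."""
#       try:
#           i = choices.index(str(savedValue))
#       except ValueError:
#           i = iDefault
#       cb.SetSelection(i)
#
#   # usage sketch: restoreComboSelection(self.cbFont, fontChoices, data['Font'], 2)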
self.cbSync.SetValue(True) - self.cbXHair.SetValue(True) # Have cross hair by default + self.cbXHair.SetValue(self.data['CrossHair']) # Have cross hair by default self.cbAutoScale.SetValue(True) + self.cbGrid.SetValue(self.data['Grid']) # Callbacks self.Bind(wx.EVT_CHECKBOX, self.redraw_event , self.cbSub ) self.Bind(wx.EVT_COMBOBOX, self.redraw_event , self.cbCurveType) diff --git a/pydatview/appdata.py b/pydatview/appdata.py new file mode 100644 index 0000000..f801565 --- /dev/null +++ b/pydatview/appdata.py @@ -0,0 +1,147 @@ +import json +import os +from weio.weio import defaultUserDataDir +from .GUICommon import Error + + +def configFilePath(): + return os.path.join(defaultUserDataDir(), 'pyDatView', 'pyDatView.json') + +def loadAppData(mainframe): + configFile = configFilePath() + os.makedirs(os.path.dirname(configFile), exist_ok=True) + data = defaultAppData(mainframe) + #print('>>> configFile', configFile) + #print('Default Data content:\n') + #for k,v in data.items(): + # print('{:20s}: {}\n'.format(k,v)) + if os.path.exists(configFile): + sError='' + try: + with open(configFile) as f: + data2 = json.load(f) + except: + sError='Error: pyDatView config file is not properly formatted.\n\n' + sError+='The config file was at the following location:\n {}\n\n'.format(configFile) + + configFileBkp = configFile+'_bkp' + import shutil + try: + shutil.copy2(configFile, configFileBkp) + sError+='A backup of the file was made at the following location:\n {}\n\n'.format(configFileBkp) + backup=True + except: + backup=False + if backup: + try: + os.remove(configFile) + except: + sError+='To solve this issue, the config file was deleted.\n\n' + else: + sError+='A backup of the file could not be made and the file was not deleted\n\n' + sError+='If the problem persists, post an issue on the github repository\n' + #raise Exception(sError) + Error(mainframe, sError) + if len(sError)==0: + # Merging only what overlaps between default and user file + # --- Level 1 + for k1,v1 in data2.items(): + if k1 in data.keys(): + if type(data[k1]) is dict: + # --- Level 2 + for k2,v2 in v1.items(): + if k2 in data[k1].keys(): + if type(data[k1][k2]) is dict: + # --- Level 3 + for k3,v3 in v2.items(): + if k3 in data[k1][k2].keys(): + data[k1][k2][k3]=v3 + else: + data[k1][k2]=v2 + else: + data[k1]=v1 + #print('Data content on load:\n') + #for k,v in data.items(): + # print('{:20s}: {}\n'.format(k,v)) + return data + +def saveAppData(mainFrame, data): + from .GUICommon import getFontSize, getMonoFontSize + if not mainFrame.datareset: + # --- Import data from GUI + data['fontSize'] = int(getFontSize()) + data['monoFontSize'] = int(getMonoFontSize()) + if hasattr(mainFrame, 'plotPanel'): + savePlotPanelData(data['plotPanel'], mainFrame.plotPanel) + if hasattr(mainFrame, 'plotPanel'): + saveInfoPanelData(data['infoPanel'], mainFrame.infoPanel) + + # --- Write config file + configFile = configFilePath() + #print('>>> Writing configFile', configFile) + #print('Data content on close:\n') + #for k,v in data.items(): + # print('{:20s}: {}\n'.format(k,v)) + try: + os.makedirs(os.path.dirname(configFile), exist_ok=True) + with open(configFile, 'w') as f: + json.dump(data, f, indent=2) + except: + pass + +def defaultAppData(mainframe): + data={} + # --- Main frame data + data['windowSize'] = (900,700) + data['monoFontSize'] = mainframe.systemFontSize + data['fontSize'] = mainframe.systemFontSize + #SIDE_COL = [160,160,300,420,530] + #SIDE_COL_LARGE = [200,200,360,480,600] + #BOT_PANL =85 + 
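# --- Editor's note (illustrative sketch, not part of the patch): the three nested
# loops in loadAppData above merge the user's JSON file into the defaults while keeping
# only keys that already exist in the defaults, down to three levels of nesting.
# A recursive equivalent of that policy (hypothetical helper, not part of pyDatView):
#
#   def mergeKnownKeys(default, user):
#       """Copy values from `user` into `default`, keeping only keys `default` already has."""
#       for k, v in user.items():
#           if k in default:
#               if isinstance(default[k], dict) and isinstance(v, dict):
#                   mergeKnownKeys(default[k], v)   # recurse into nested sections
#               else:
#                   default[k] = v
#       return default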
data['plotPanel']=defaultPlotPanelData() + data['infoPanel']=defaultInfoPanelData() + return data + +# --- Plot Panel +# TODO put this into plotPanel file? +def savePlotPanelData(data, plotPanel): + data['Grid'] = plotPanel.cbGrid.IsChecked() + data['CrossHair'] = plotPanel.cbXHair.IsChecked() + data['plotStyle']['Font'] = plotPanel.esthPanel.cbFont.GetValue() + data['plotStyle']['LegendFont'] = plotPanel.esthPanel.cbLgdFont.GetValue() + data['plotStyle']['LegendPosition'] = plotPanel.esthPanel.cbLegend.GetValue() + data['plotStyle']['LineWidth'] = plotPanel.esthPanel.cbLW.GetValue() + data['plotStyle']['MarkerSize'] = plotPanel.esthPanel.cbMS.GetValue() + +def defaultPlotPanelData(): + data={} + data['CrossHair']=True + data['Grid']=False + plotStyle = dict() + plotStyle['Font'] = '11' + plotStyle['LegendFont'] = '11' + plotStyle['LegendPosition'] = 'Upper right' + plotStyle['LineWidth'] = '1.5' + plotStyle['MarkerSize'] = '2' + data['plotStyle']= plotStyle + return data + +# --- Info Panel +# TODO put this into infoPanel file? +def saveInfoPanelData(data, infoPanel): + data['ColumnsRegular'] = [c['name'] for c in infoPanel.ColsReg if c['s']] + data['ColumnsFFT'] = [c['name'] for c in infoPanel.ColsFFT if c['s']] + data['ColumnsMinMax'] = [c['name'] for c in infoPanel.ColsMinMax if c['s']] + data['ColumnsPDF'] = [c['name'] for c in infoPanel.ColsPDF if c['s']] + data['ColumnsCmp'] = [c['name'] for c in infoPanel.ColsCmp if c['s']] + +def defaultInfoPanelData(): + data={} + data['ColumnsRegular'] = ['Column','Mean','Std','Min','Max','Range','dx','n'] + data['ColumnsFFT'] = ['Column','Mean','Std','Min','Max','Min(FFT)','Max(FFT)',u'\u222By(FFT)','dx(FFT)','xMax(FFT)','n(FFT)','n'] + data['ColumnsMinMax'] = ['Column','Mean','Std','Min','Max','Mean(MinMax)','Std(MinMax)',u'\u222By(MinMax)','n'] + data['ColumnsPDF'] = ['Column','Mean','Std','Min','Max','Min(PDF)','Max(PDF)',u'\u222By(PDF)','n(PDF)'] + data['ColumnsCmp'] = ['Column','Mean(Cmp)','Std(Cmp)','Min(Cmp)','Max(Cmp)','n(Cmp)'] + return data + + diff --git a/pydatview/common.py b/pydatview/common.py index c389fcd..59f1340 100644 --- a/pydatview/common.py +++ b/pydatview/common.py @@ -145,34 +145,6 @@ def extract_key_num(text): regex = re.compile(r'(?P[\w\-]+)=(?P[0-9+epinf.-]*?)($|,)') return {match.group("key"): np.float(match.group("value")) for match in regex.finditer(text.replace(' ',''))} -# --------------------------------------------------------------------------------} -# --- -# --------------------------------------------------------------------------------{ -# def getMonoFontAbs(): -# import wx -# #return wx.Font(9, wx.MODERN, wx.NORMAL, wx.NORMAL, False, u'Monospace') -# if os.name=='nt': -# font=wx.Font(9, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) -# elif os.name=='posix': -# font=wx.Font(10, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) -# else: -# font=wx.Font(8, wx.TELETYPE, wx.NORMAL, wx.NORMAL, False) -# return font -# -# def getMonoFont(widget): -# import wx -# font = widget.GetFont() -# font.SetFamily(wx.TELETYPE) -# if platform.system()=='Windows': -# pass -# elif platform.system()=='Linux': -# pass -# elif platform.system()=='Darwin': -# font.SetPointSize(font.GetPointSize()-1) -# else: -# pass -# return font - def getDt(x): """ returns dt in s """ def myisnat(dt): diff --git a/pydatview/main.py b/pydatview/main.py index 783da36..ab8f426 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -6,6 +6,9 @@ import numpy as np import os.path +import sys +import traceback +import gc try: import pandas as pd except: @@ 
-20,9 +23,6 @@ sys.exit(-1) #raise -import sys -import traceback -import gc # GUI import wx @@ -38,13 +38,6 @@ # Pluggins from .plugins import dataPlugins - - -# --------------------------------------------------------------------------------} -# --- GLOBAL -# --------------------------------------------------------------------------------{ -PROG_NAME='pyDatView' -PROG_VERSION='v0.2-local' try: import weio # File Formats and File Readers except: @@ -57,7 +50,13 @@ print(' git clone --recurse-submodules https://github.com/ebranlard/pyDatView\n') sys.exit(-1) +from .appdata import loadAppData, saveAppData, configFilePath, defaultAppData +# --------------------------------------------------------------------------------} +# --- GLOBAL +# --------------------------------------------------------------------------------{ +PROG_NAME='pyDatView' +PROG_VERSION='v0.2-local' SIDE_COL = [160,160,300,420,530] SIDE_COL_LARGE = [200,200,360,480,600] BOT_PANL =85 @@ -101,15 +100,21 @@ def OnDropFiles(self, x, y, filenames): # --- Main Frame # --------------------------------------------------------------------------------{ class MainFrame(wx.Frame): - def __init__(self, filename=None): + def __init__(self, data=None): # Parent constructor wx.Frame.__init__(self, None, -1, PROG_NAME+' '+PROG_VERSION) - # Data + # Hooking exceptions to display them to the user + sys.excepthook = MyExceptionHook + # --- Data self.tabList=TableList() self.restore_formulas = [] + self.systemFontSize = self.GetFont().GetPointSize() + self.data = loadAppData(self) + self.datareset = False + # Global variables... + setFontSize(self.data['fontSize']) + setMonoFontSize(self.data['monoFontSize']) - # Hooking exceptions to display them to the user - sys.excepthook = MyExceptionHook # --- GUI #font = self.GetFont() #print(font.GetFamily(),font.GetStyle(),font.GetPointSize()) @@ -119,6 +124,7 @@ def __init__(self, filename=None): #font.SetPointSize(8) #print(font.GetFamily(),font.GetStyle(),font.GetPointSize()) #self.SetFont(font) + self.SetFont(getFont(self)) # --- Menu menuBar = wx.MenuBar() @@ -152,9 +158,11 @@ def __init__(self, filename=None): helpMenu = wx.Menu() aboutMenuItem = helpMenu.Append(wx.NewId(), 'About', 'About') + resetMenuItem = helpMenu.Append(wx.NewId(), 'Reset options', 'Rest options') menuBar.Append(helpMenu, "&Help") self.SetMenuBar(menuBar) - self.Bind(wx.EVT_MENU,self.onAbout,aboutMenuItem) + self.Bind(wx.EVT_MENU,self.onAbout, aboutMenuItem) + self.Bind(wx.EVT_MENU,self.onReset, resetMenuItem) self.FILE_FORMATS, errors= weio.fileFormats(ignoreErrors=True, verbose=False) @@ -220,10 +228,11 @@ def __init__(self, filename=None): self.FrameSizer.Add(self.MainPanel,1, flag=wx.EXPAND,border=0) self.SetSizer(self.FrameSizer) - self.SetSize((900, 700)) + self.SetSize(self.data['windowSize']) self.Center() self.Show() self.Bind(wx.EVT_SIZE, self.OnResizeWindow) + self.Bind(wx.EVT_CLOSE, self.onClose) # Shortcuts idFilter=wx.NewId() @@ -234,16 +243,11 @@ def __init__(self, filename=None): ) self.SetAcceleratorTable(accel_tbl) - def printString(self, event=None, string=''): - print('>>> string',string) - def onFilter(self,event): if hasattr(self,'selPanel'): self.selPanel.colPanel1.tFilter.SetFocus() event.Skip() - - def clean_memory(self,bReload=False): #print('Clean memory') # force Memory cleanup @@ -330,7 +334,7 @@ def load_tabs_into_GUI(self, bReload=False, bAdd=False, bPlot=True): self.selPanel = SelectionPanel(self.vSplitter, self.tabList, mode=mode, mainframe=self) self.tSplitter = 
wx.SplitterWindow(self.vSplitter) #self.tSplitter.SetMinimumPaneSize(20) - self.infoPanel = InfoPanel(self.tSplitter) + self.infoPanel = InfoPanel(self.tSplitter, data=self.data['infoPanel']) self.plotPanel = PlotPanel(self.tSplitter, self.selPanel, self.infoPanel, self) self.tSplitter.SetSashGravity(0.9) self.tSplitter.SplitHorizontally(self.plotPanel, self.infoPanel) @@ -502,7 +506,11 @@ def redraw(self): # self.infoPanel.showStats(self.plotPanel.plotData,self.plotPanel.pltTypePanel.plotType()) def onExit(self, event): - self.Close() + self.Close() + + def onClose(self, event): + saveAppData(self, self.data) + event.Skip() def cleanGUI(self, event=None): if hasattr(self,'plotPanel'): @@ -525,11 +533,27 @@ def onSave(self, event=None): def onAbout(self, event=None): defaultDir = weio.defaultUserDataDir() # TODO input file options - Info(self,PROG_NAME+' '+PROG_VERSION+'\n\n' - 'pyDatView data directory:\n {}\n'.format(os.path.join(defaultDir,'pyDatView'))+ + About(self,PROG_NAME+' '+PROG_VERSION+'\n\n' + 'pyDatView config file:\n {}\n'.format(configFilePath())+ 'weio data directory: \n {}\n'.format(os.path.join(defaultDir,'weio'))+ '\n\nVisit http://github.com/ebranlard/pyDatView for documentation.') + def onReset (self, event=None): + configFile = configFilePath() + result = YesNo(self, + 'The options of pyDatView will be reset to default.\nThe changes will be noticeable the next time you open pyDatView.\n\n'+ + 'This action will overwrite the user settings file:\n {}\n\n'.format(configFile)+ + 'pyDatView will then close.\n\n' + 'Are you sure you want to continue?', caption = 'Reset settings?') + if result: + try: + os.remove(configFile) + except: + pass + self.data = defaultAppData(self) + self.datareset = True + self.onExit(event=None) + def onReload(self, event=None): filenames, fileformats = self.tabList.filenames_and_formats if len(filenames)>0: From 415d95f2b61fe62b3d9e9fb014150e3c5e627075 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Mon, 14 Feb 2022 18:01:15 -0700 Subject: [PATCH 26/36] Fix issue with drag and drop with specific file format --- pydatview/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydatview/main.py b/pydatview/main.py index ab8f426..540d527 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -89,7 +89,7 @@ def OnDropFiles(self, x, y, filenames): if iFormat==0: # auto-format Format = None else: - Format = parent.FILE_FORMATS[iFormat-1] + Format = self.parent.FILE_FORMATS[iFormat-1] self.parent.load_files(filenames, fileformats=[Format]*len(filenames), bAdd=bAdd) return True From d48c8aaec456039bb2b9ea96b31b1fdecc12c54c Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Thu, 17 Feb 2022 18:30:47 -0700 Subject: [PATCH 27/36] Filter: ability to combine with other tools (Fixes #99) --- pydatview/GUITools.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pydatview/GUITools.py b/pydatview/GUITools.py index 5c96168..1db94dd 100644 --- a/pydatview/GUITools.py +++ b/pydatview/GUITools.py @@ -251,7 +251,6 @@ def __init__(self, parent): self.onToggleCompute(init=True) def destroy(self,event=None): - self.parent.plotDataOptions['Filter']=None super(FilterToolPanel,self).destroy() From 14572ca01ab6e47e4acbaa9b8cbf8e665a0324ea Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Thu, 17 Feb 2022 19:11:43 -0700 Subject: [PATCH 28/36] Filter: possibility to add new dataset (Fixes #99) --- pydatview/GUITools.py | 140 ++++++++++++++++++++++++++++---------- pydatview/Tables.py | 36 +++++++++- pydatview/main.py | 1 + 
pydatview/plotdata.py | 3 +- pydatview/tools/signal.py | 13 +++- 5 files changed, 152 insertions(+), 41 deletions(-) diff --git a/pydatview/GUITools.py b/pydatview/GUITools.py index 1db94dd..3ff9d27 100644 --- a/pydatview/GUITools.py +++ b/pydatview/GUITools.py @@ -179,7 +179,7 @@ def onMDChangeChar(self, event): # --------------------------------------------------------------------------------} -# --- Moving Average +# --- Filter Tool # --------------------------------------------------------------------------------{ class FilterToolPanel(GUIToolPanel): """ @@ -200,36 +200,46 @@ def __init__(self, parent): self._filterApplied = type(self.parent.plotDataOptions['Filter'])==dict - btClose = self.getBtBitmap(self,'Close','close',self.destroy) - self.btClear = self.getBtBitmap(self, 'Clear Plot','sun' , self.onClear) - self.btComp = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleCompute) - self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) + self.btClose = self.getBtBitmap(self,'Close','close',self.destroy) + self.btAdd = self.getBtBitmap(self, 'Add','add' , self.onAdd) + self.btHelp = self.getBtBitmap(self, 'Help','help', self.onHelp) + self.btClear = self.getBtBitmap(self, 'Clear Plot','sun' , self.onClear) + self.btApply = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleApply) + self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) + + self.cbTabs = wx.ComboBox(self, -1, choices=[], style=wx.CB_READONLY) - lb1 = wx.StaticText(self, -1, 'Filter:') self.cbFilters = wx.ComboBox(self, choices=[filt['name'] for filt in self._DEFAULT_FILTERS], style=wx.CB_READONLY) self.lbParamName = wx.StaticText(self, -1, ' :') self.cbFilters.SetSelection(0) - #self.tParam = wx.TextCtrl(self, wx.ID_ANY,, size = (30,-1), style=wx.TE_PROCESS_ENTER) self.tParam = wx.SpinCtrlDouble(self, value='11', size=wx.Size(60,-1)) self.lbInfo = wx.StaticText( self, -1, '') # --- Layout btSizer = wx.FlexGridSizer(rows=3, cols=2, hgap=2, vgap=0) - btSizer.Add(btClose ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btClear ,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btComp,0,flag = wx.ALL|wx.EXPAND, border = 1) - btSizer.Add(self.btPlot ,0,flag = wx.ALL|wx.EXPAND, border = 1) - #btSizer.Add(btHelp ,0,flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btClose , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btClear , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btAdd , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btPlot , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btHelp , 0, flag = wx.ALL|wx.EXPAND, border = 1) + btSizer.Add(self.btApply , 0, flag = wx.ALL|wx.EXPAND, border = 1) + + + horzSizerT = wx.BoxSizer(wx.HORIZONTAL) + horzSizerT.Add(wx.StaticText(self, -1, 'Table:') , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.LEFT, 5) + horzSizerT.Add(self.cbTabs , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.LEFT, 1) horzSizer = wx.BoxSizer(wx.HORIZONTAL) - horzSizer.Add(lb1 ,0,flag = wx.LEFT|wx.CENTER,border = 5) - horzSizer.Add(self.cbFilters ,0,flag = wx.LEFT|wx.CENTER,border = 1) - horzSizer.Add(self.lbParamName ,0,flag = wx.LEFT|wx.CENTER,border = 5) - horzSizer.Add(self.tParam ,0,flag = wx.LEFT|wx.CENTER,border = 1) + horzSizer.Add(wx.StaticText(self, -1, 'Filter:') , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.LEFT, 5) + horzSizer.Add(self.cbFilters , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.LEFT, 1) + horzSizer.Add(self.lbParamName , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.LEFT, 
5) + horzSizer.Add(self.tParam , 0, wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL|wx.LEFT, 1) + vertSizer = wx.BoxSizer(wx.VERTICAL) vertSizer.Add(self.lbInfo ,0, flag = wx.LEFT ,border = 5) + vertSizer.Add(horzSizerT ,1, flag = wx.LEFT|wx.EXPAND,border = 1) vertSizer.Add(horzSizer ,1, flag = wx.LEFT|wx.EXPAND,border = 1) self.sizer = wx.BoxSizer(wx.HORIZONTAL) @@ -238,6 +248,7 @@ def __init__(self, parent): self.SetSizer(self.sizer) # --- Events + self.cbTabs.Bind (wx.EVT_COMBOBOX, self.onTabChange) self.cbFilters.Bind(wx.EVT_COMBOBOX, self.onSelectFilt) self.Bind(wx.EVT_SPINCTRLDOUBLE, self.onParamChangeArrow, self.tParam) self.Bind(wx.EVT_TEXT_ENTER, self.onParamChangeEnter, self.tParam) @@ -247,13 +258,14 @@ def __init__(self, parent): assert isinstance(self.spintxt, wx.TextCtrl) self.spintxt.Bind(wx.EVT_CHAR_HOOK, self.onParamChangeChar) + # --- Init triggers + self.updateTabList() self.onSelectFilt() - self.onToggleCompute(init=True) + self.onToggleApply(init=True) def destroy(self,event=None): super(FilterToolPanel,self).destroy() - def onSelectFilt(self, event=None): """ Select the filter, but does not applied it to the plotData parentFilt is unchanged @@ -272,7 +284,16 @@ def onSelectFilt(self, event=None): else: self.tParam.SetValue(filt['param']) - def onToggleCompute(self, event=None, init=False): + def _GUI2Data(self): + iFilt = self.cbFilters.GetSelection() + filt = self._DEFAULT_FILTERS[iFilt].copy() + try: + filt['param']=np.float(self.spintxt.Value) + except: + print('[WARN] pyDatView: Issue on Mac: GUITools.py/_GUI2Data. Help needed.') + return filt + + def onToggleApply(self, event=None, init=False): """ apply Filter based on GUI Data """ @@ -281,14 +302,14 @@ def onToggleCompute(self, event=None, init=False): self._filterApplied = not self._filterApplied if self._filterApplied: - self.parent.plotDataOptions['Filter'] =self._GUI2Filt() + self.parent.plotDataOptions['Filter'] =self._GUI2Data() #print('Apply', self.parent.plotDataOptions['Filter']) self.lbInfo.SetLabel( 'Filter is now applied on the fly. Change parameter live. Click "Clear" to stop. ' ) self.btPlot.Enable(False) self.btClear.Enable(False) - self.btComp.SetLabel(CHAR['sun']+' Clear') + self.btApply.SetLabel(CHAR['sun']+' Clear') else: self.parent.plotDataOptions['Filter'] = None self.lbInfo.SetLabel( @@ -297,21 +318,29 @@ def onToggleCompute(self, event=None, init=False): ) self.btPlot.Enable(True) self.btClear.Enable(True) - self.btComp.SetLabel(CHAR['cloud']+' Apply') + self.btApply.SetLabel(CHAR['cloud']+' Apply') if not init: self.parent.load_and_draw() # Data will change + self.updateTabList() - pass + def onAdd(self,event=None): + iSel = self.cbTabs.GetSelection() + tabList = self.parent.selPanel.tabList + mainframe = self.parent.mainframe + icol, colname = self.parent.selPanel.xCol + opt = self._GUI2Data() + errors=[] + if iSel==0: + dfs, names, errors = tabList.applyFiltering(icol, opt, bAdd=True) + mainframe.load_dfs(dfs,names,bAdd=True) + else: + df, name = tabList.get(iSel-1).applyFiltering(icol, opt, bAdd=True) + mainframe.load_df(df,name,bAdd=True) + self.updateTabList() - def _GUI2Filt(self): - iFilt = self.cbFilters.GetSelection() - filt = self._DEFAULT_FILTERS[iFilt].copy() - try: - filt['param']=np.float(self.spintxt.Value) - except: - print('[WARN] pyDatView: Issue on Mac: GUITools.py/_GUI2Filt. 
Help needed.')
-        return filt
+        if len(errors)>0:
+            raise Exception('Error: The filtering failed on some tables:\n\n'+'\n'.join(errors))
 
     def onPlot(self, event=None):
         """ 
@@ -321,7 +350,7 @@ def onPlot(self, event=None):
         if len(self.parent.plotData)!=1:
             Error(self,'Plotting only works for a single plot. Plot less data.')
             return
-        filt=self._GUI2Filt()
+        filt=self._GUI2Data()
 
         PD = self.parent.plotData[0]
         y_filt = applyFilter(PD.x0, PD.y0, filt)
@@ -338,7 +367,7 @@ def onClear(self, event):
 
     def onParamChange(self, event=None):
         if self._filterApplied:
-            self.parent.plotDataOptions['Filter'] =self._GUI2Filt()
+            self.parent.plotDataOptions['Filter'] =self._GUI2Data()
             #print('OnParamChange', self.parent.plotDataOptions['Filter'])
             self.parent.load_and_draw() # Data will change
 
@@ -358,6 +387,46 @@ def onParamChangeChar(self, event):
             self.tParam.SetValue(self.spintxt.Value)
             self.onParamChangeEnter(event)
 
+    def onTabChange(self,event=None):
+        #tabList = self.parent.selPanel.tabList
+        #iSel=self.cbTabs.GetSelection()
+        pass
+
+    def updateTabList(self,event=None):
+        tabList = self.parent.selPanel.tabList
+        tabListNames = ['All opened tables']+tabList.getDisplayTabNames()
+        try:
+            iSel=np.max([np.min([self.cbTabs.GetSelection(),len(tabListNames)]),0])
+            self.cbTabs.Clear()
+            [self.cbTabs.Append(tn) for tn in tabListNames]
+            self.cbTabs.SetSelection(iSel)
+        except RuntimeError:
+            pass
+
+    def onHelp(self,event=None):
+        Info(self,"""Filtering.
+
+The filtering operation changes the "y" values of a table/plot,
+applying a given filter (typically cutting off some frequencies).
+
+To filter, perform the following steps:
+
+- Choose a filtering method:
+   - Moving average: apply a moving average filter, with
+     a length specified by the window size (in indices)
+   - High pass 1st order: apply a first order high-pass filter,
+     passing the frequencies above the cutoff frequency parameter.
+   - Low pass 1st order: apply a first order low-pass filter,
+     passing the frequencies below the cutoff frequency parameter.
+
+- Click on one of the following buttons:
+   - Plot: will display the filtered data on the figure
+   - Apply: will perform the filtering on the fly for all new plots
+   - Add: will create new table(s) with filtered values for all
+     signals. This process might take some time.
+     Currently done for all tables. 
+""") + # --------------------------------------------------------------------------------} # --- Resample @@ -379,10 +448,10 @@ def __init__(self, parent): # --- GUI elements self.btClose = self.getBtBitmap(self, 'Close','close', self.destroy) self.btAdd = self.getBtBitmap(self, 'Add','add' , self.onAdd) - self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) + self.btHelp = self.getBtBitmap(self, 'Help','help', self.onHelp) self.btClear = self.getBtBitmap(self, 'Clear Plot','sun', self.onClear) + self.btPlot = self.getBtBitmap(self, 'Plot' ,'chart' , self.onPlot) self.btApply = self.getToggleBtBitmap(self,'Apply','cloud',self.onToggleApply) - self.btHelp = self.getBtBitmap(self, 'Help','help', self.onHelp) #self.lb = wx.StaticText( self, -1, """ Click help """) self.cbTabs = wx.ComboBox(self, -1, choices=[], style=wx.CB_READONLY) @@ -509,7 +578,6 @@ def onToggleApply(self, event=None, init=False): self.parent.load_and_draw() # Data will change self.setCurrentX() - def onAdd(self,event=None): iSel = self.cbTabs.GetSelection() tabList = self.parent.selPanel.tabList diff --git a/pydatview/Tables.py b/pydatview/Tables.py index 88fb00a..cd433a4 100644 --- a/pydatview/Tables.py +++ b/pydatview/Tables.py @@ -259,7 +259,7 @@ def applyCommonMaskString(self,maskString,bAdd=True): return dfs_new, names_new, errors - # --- Resampling and other actions + # --- Resampling TODO MOVE THIS OUT OF HERE OR UNIFY def applyResampling(self,iCol,sampDict,bAdd=True): dfs_new = [] names_new = [] @@ -273,7 +273,22 @@ def applyResampling(self,iCol,sampDict,bAdd=True): names_new.append(name_new) # except: # errors.append('Resampling failed for table: '+t.active_name) # TODO + return dfs_new, names_new, errors + # --- Filtering TODO MOVE THIS OUT OF HERE OR UNIFY + def applyFiltering(self,iCol,options,bAdd=True): + dfs_new = [] + names_new = [] + errors=[] + for i,t in enumerate(self._tabs): +# try: + df_new, name_new = t.applyFiltering(iCol, options, bAdd=bAdd) + if df_new is not None: + # we don't append when string is empty + dfs_new.append(df_new) + names_new.append(name_new) +# except: +# errors.append('Resampling failed for table: '+t.active_name) # TODO return dfs_new, names_new, errors @@ -429,8 +444,8 @@ def applyMaskString(self,maskString,bAdd=True): raise Exception('Error: The mask failed for table: '+self.name) return df_new, name_new - # --- Important manipulation - def applyResampling(self,iCol,sampDict,bAdd=True): + # --- Important manipulation TODO MOVE THIS OUT OF HERE OR UNIFY + def applyResampling(self, iCol, sampDict, bAdd=True): from pydatview.tools.signal import applySamplerDF if iCol==0: raise Exception('Cannot resample based on index') @@ -444,6 +459,21 @@ def applyResampling(self,iCol,sampDict,bAdd=True): self.data=df_new return df_new, name_new + def applyFiltering(self, iCol, options, bAdd=True): + from pydatview.tools.signal import applyFilterDF + if iCol==0: + raise Exception('Cannot filter based on index') + colName=self.data.columns[iCol-1] + df_new =applyFilterDF(self.data, colName, options) + df_new + if bAdd: + name_new=self.raw_name+'_filtered' + else: + name_new=None + self.data=df_new + return df_new, name_new + + def radialAvg(self,avgMethod, avgParam): import pydatview.fast.fastlib as fastlib import pydatview.fast.fastfarm as fastfarm diff --git a/pydatview/main.py b/pydatview/main.py index 540d527..b4409c0 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -374,6 +374,7 @@ def load_tabs_into_GUI(self, bReload=False, bAdd=False, bPlot=True): except: pass # 
Hack + #self.onShowTool(tool='Filter') #self.onShowTool(tool='Resample') #self.onDataPlugin(toolName='Bin data') diff --git a/pydatview/plotdata.py b/pydatview/plotdata.py index 17cb2f2..19b1d06 100644 --- a/pydatview/plotdata.py +++ b/pydatview/plotdata.py @@ -70,8 +70,9 @@ def fromXY(PD, x, y, sx='', sy=''): def _post_init(PD, Options={}): # --- Perform data manipulation on the fly - #print(Options) + #[print(k,v) for k,v in Options.items()] keys=Options.keys() + # TODO setup an "Order" if 'RemoveOutliers' in keys: if Options['RemoveOutliers']: from pydatview.tools.signal import reject_outliers diff --git a/pydatview/tools/signal.py b/pydatview/tools/signal.py index be7011f..e32da6e 100644 --- a/pydatview/tools/signal.py +++ b/pydatview/tools/signal.py @@ -234,7 +234,7 @@ def highpass1(y, dt, fc=3) : return y_filt -def applyFilter(x, y,filtDict): +def applyFilter(x, y, filtDict): if filtDict['name']=='Moving average': return moving_average(y, n=np.round(filtDict['param']).astype(int)) elif filtDict['name']=='Low pass 1st order': @@ -246,6 +246,17 @@ def applyFilter(x, y,filtDict): else: raise NotImplementedError('{}'.format(filtDict)) +def applyFilterDF(df_old, x_col, options): + """ apply filter on a dataframe """ + # Brute force loop + df_new = df_old.copy() + x = df_new[x_col] + for (colName, colData) in df_new.iteritems(): + if colName != x_col: + df_new[colName] = applyFilter(x, colData, options) + return df_new + + # --------------------------------------------------------------------------------} # --- # --------------------------------------------------------------------------------{ From 09daa79eec58592cde493a42fbca6601155a96ed Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Thu, 17 Feb 2022 21:10:20 -0700 Subject: [PATCH 29/36] FFT: option to average using n points per decade (Fixed #75) --- pydatview/GUIPlotPanel.py | 61 +- pydatview/plotdata.py | 4 +- pydatview/tools/spectral.py | 2012 ++++++++++++++++++----------------- 3 files changed, 1098 insertions(+), 979 deletions(-) diff --git a/pydatview/GUIPlotPanel.py b/pydatview/GUIPlotPanel.py index e63a5c9..d80e08c 100644 --- a/pydatview/GUIPlotPanel.py +++ b/pydatview/GUIPlotPanel.py @@ -113,7 +113,7 @@ def __init__(self, parent): self.cbType = wx.ComboBox(self, choices=['PSD','f x PSD','Amplitude'] , style=wx.CB_READONLY) self.cbType.SetSelection(0) lbAveraging = wx.StaticText( self, -1, 'Avg.:') - self.cbAveraging = wx.ComboBox(self, choices=['None','Welch'] , style=wx.CB_READONLY) + self.cbAveraging = wx.ComboBox(self, choices=['None','Welch','Binning'] , style=wx.CB_READONLY) self.cbAveraging.SetSelection(1) self.lbAveragingMethod = wx.StaticText( self, -1, 'Window:') self.cbAveragingMethod = wx.ComboBox(self, choices=['Hamming','Hann','Rectangular'] , style=wx.CB_READONLY) @@ -121,7 +121,9 @@ def __init__(self, parent): self.lbP2 = wx.StaticText( self, -1, '2^n:') self.scP2 = wx.SpinCtrl(self, value='11',size=wx.Size(40,-1)) self.lbWinLength = wx.StaticText( self, -1, '(2048) ') - self.scP2.SetRange(3, 19) + self.scP2.SetRange(3, 50) + self.previousNExp = 8 + self.previousNDec = 20 lbMaxFreq = wx.StaticText( self, -1, 'Xlim:') self.tMaxFreq = wx.TextCtrl(self,size = (30,-1),style=wx.TE_PROCESS_ENTER) self.tMaxFreq.SetValue("-1") @@ -155,13 +157,36 @@ def __init__(self, parent): def onXlimChange(self,event=None): self.parent.redraw_same_data(); def onSpecCtrlChange(self,event=None): + if self.cbAveraging.GetStringSelection()=='None': + self.scP2.Enable(False) + self.cbAveragingMethod.Enable(False) + 
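# (Editor's note, not part of the patch) Summary of the averaging modes handled by
# this if/elif/else: 'None' greys out the window-length controls; 'Binning' reuses the
# same spin control as "n points per decade", saving the current 2^n exponent in
# self.previousNExp so it can be restored later; 'Welch' restores the exponent and
# re-enables the averaging-window choice.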
self.lbP2.SetLabel('') + self.lbWinLength.SetLabel('') + elif self.cbAveraging.GetStringSelection()=='Binning': + self.previousNExp= self.scP2.GetValue() + self.scP2.SetValue(self.previousNDec) + self.scP2.Enable(True) + self.cbAveragingMethod.Enable(False) + self.lbP2.SetLabel('n:') + self.lbWinLength.SetLabel('') + else: + self.previousDec= self.scP2.GetValue() + self.scP2.SetValue(self.previousNExp) + self.lbP2.SetLabel('2^n:') + self.scP2.Enable(True) + self.cbAveragingMethod.Enable(True) + self.onP2ChangeText(event=None) self.parent.load_and_draw() # Data changes + def onDetrendChange(self,event=None): self.parent.load_and_draw() # Data changes def onP2ChangeText(self,event=None): - nExp=self.scP2.GetValue() - self.updateP2(nExp) + if self.cbAveraging.GetStringSelection()=='Binning': + pass + else: + nExp=self.scP2.GetValue() + self.updateP2(nExp) self.parent.load_and_draw() # Data changes def updateP2(self,P2): @@ -683,7 +708,7 @@ def onMouseRelease(self, event): self.rightMeasure.plot(ax, ax_idx) else: return - if self.cbAutoScale.IsChecked() is False: + if not self.cbAutoScale.IsChecked(): self._restore_limits() if self.leftMeasure.axis_idx == self.rightMeasure.axis_idx and self.leftMeasure.axis_idx != -1: @@ -770,9 +795,10 @@ def setPD_FFT(self,pd): avgWindow = self.spcPanel.cbAveragingMethod.GetStringSelection() bDetrend = self.spcPanel.cbDetrend.IsChecked() nExp = self.spcPanel.scP2.GetValue() + nPerDecade = self.spcPanel.scP2.GetValue() # Convert plotdata to FFT data try: - Info = pd.toFFT(yType=yType, xType=xType, avgMethod=avgMethod, avgWindow=avgWindow, bDetrend=bDetrend, nExp=nExp) + Info = pd.toFFT(yType=yType, xType=xType, avgMethod=avgMethod, avgWindow=avgWindow, bDetrend=bDetrend, nExp=nExp, nPerDecade=nPerDecade) # Trigger if hasattr(Info,'nExp') and Info.nExp!=nExp: self.spcPanel.scP2.SetValue(Info.nExp) @@ -1013,18 +1039,21 @@ def plot_all(self, keep_limits=True): # XLIM - TODO FFT ONLY NASTY if self.pltTypePanel.cbFFT.GetValue(): try: - xlim=float(self.spcPanel.tMaxFreq.GetLineText(0)) - if xlim>0: - ax_left.set_xlim([0,xlim]) - pd=PD[ax_left.iPD[0]] - I=pd.x0: + ax_left.set_xlim([0,xlim]) + pd=PD[ax_left.iPD[0]] + I=pd.xn: - print('[WARN] Power of 2 value was too high and was reduced. 
Disable averaging to use the full spectrum.'); - nExp=int(np.log(nFFTAll)/np.log(2))-1 - nPerSeg=2**nExp - if averaging_window=='hamming': - window = hamming(nPerSeg, True)# True=Symmetric, like matlab - elif averaging_window=='hann': - window = hann(nPerSeg, True) - elif averaging_window=='rectangular': - window = boxcar(nPerSeg) - else: - raise Exception('Averaging window unknown {}'.format(averaging_window)) - frq, PSD, Info = pwelch(y, fs=Fs, window=window, detrend=detrend) - Info.nExp = nExp - else: - raise Exception('Averaging method unknown {}'.format(averaging)) - - # --- Formatting output - if output_type=='amplitude': - deltaf = frq[1]-frq[0] - Y = np.sqrt(PSD*2*deltaf) - # NOTE: the above should be the same as:Y=abs(Y[range(nhalf)])/n;Y[1:-1]=Y[1:-1]*2; - elif output_type=='psd': # one sided - Y = PSD - elif output_type=='f x psd': - Y = PSD*frq - else: - raise NotImplementedError('Contact developer') - if detrend: - frq= frq[1:] - Y = Y[1:] - return frq, Y, Info - - - -# --------------------------------------------------------------------------------} -# --- Spectral simple (averaging below) -# --------------------------------------------------------------------------------{ -def fft_amplitude(y, fs=1.0, detrend ='constant', return_onesided=True): - """ Returns FFT amplitude of signal """ - frq, PSD, Info = psd(y, fs=fs, detrend=detrend, return_onesided=return_onesided) - deltaf = frq[1]-frq[0] - Y = np.sqrt(PSD*2*deltaf) - return frq, Y, Info - -def psd(y, fs=1.0, detrend ='constant', return_onesided=True): - """ Perform PSD without averaging """ - if not return_onesided: - raise NotImplementedError('Double sided todo') - - if detrend is None: - detrend=False - - if detrend=='constant' or detrend==True: - m=np.mean(y); - else: - m=0; - - n = len(y) - if n%2==0: - nhalf = int(n/2+1) - else: - nhalf = int((n+1)/2) - - frq = np.arange(nhalf)*fs/n; - Y = np.fft.rfft(y-m) #Y = np.fft.fft(y) - PSD = abs(Y[range(nhalf)])**2 /(n*fs) # PSD - PSD[1:-1] = PSD[1:-1]*2; - class InfoClass(): - pass - Info = InfoClass(); - Info.df = frq[1]-frq[0] - Info.fMax = frq[-1] - Info.LFreq = len(frq) - Info.LSeg = len(Y) - Info.LWin = len(Y) - Info.LOvlp = 0 - Info.nFFT = len(Y) - Info.nseg = 1 - return frq, PSD, Info - - -# --------------------------------------------------------------------------------} -# --- Windows -# --------------------------------------------------------------------------------{ -"""The suite of window functions.""" -def fnextpow2(x): - return 2**np.ceil( np.log(x)*0.99999999999/np.log(2)); - -def fDefaultWinLen(x,overlap_frac=0.5): - return fnextpow2(np.sqrt(len(x)/(1-overlap_frac))) - -def fDefaultWinLenMatlab(x): - return np.fix((len(x)-3)*2./9.) - -def _len_guards(M): - """Handle small or incorrect window lengths""" - if int(M) != M or M < 0: - raise ValueError('Window length M must be a non-negative integer') - return M <= 1 - -def _extend(M, sym): - """Extend window by 1 sample if needed for DFT-even symmetry""" - if not sym: - return M + 1, True - else: - return M, False - -def _truncate(w, needed): - """Truncate window by 1 sample if needed for DFT-even symmetry""" - if needed: - return w[:-1] - else: - return w - -def general_cosine(M, a, sym=True): - if _len_guards(M): - return np.ones(M) - M, needs_trunc = _extend(M, sym) - - fac = np.linspace(-np.pi, np.pi, M) - w = np.zeros(M) - for k in range(len(a)): - w += a[k] * np.cos(k * fac) - - return _truncate(w, needs_trunc) - - -def boxcar(M, sym=True): - """Return a boxcar or rectangular window. 
- - Also known as a rectangular window or Dirichlet window, this is equivalent - to no window at all. - """ - if _len_guards(M): - return np.ones(M) - M, needs_trunc = _extend(M, sym) - - w = np.ones(M, float) - - return _truncate(w, needs_trunc) - -def hann(M, sym=True): # same as hanning(*args, **kwargs): - return general_hamming(M, 0.5, sym) - - -def general_hamming(M, alpha, sym=True): - r"""Return a generalized Hamming window. - The generalized Hamming window is constructed by multiplying a rectangular - window by one period of a cosine function [1]_. - w(n) = \alpha - \left(1 - \alpha\right) \cos\left(\frac{2\pi{n}}{M-1}\right) - \qquad 0 \leq n \leq M-1 - """ - return general_cosine(M, [alpha, 1. - alpha], sym) - - -def hamming(M, sym=True): - r"""Return a Hamming window. - The Hamming window is a taper formed by using a raised cosine with - non-zero endpoints, optimized to minimize the nearest side lobe. - w(n) = 0.54 - 0.46 \cos\left(\frac{2\pi{n}}{M-1}\right) - \qquad 0 \leq n \leq M-1 - """ - return general_hamming(M, 0.54, sym) - -_win_equiv_raw = { - ('boxcar', 'box', 'ones', 'rect', 'rectangular'): (boxcar, False), - ('hamming', 'hamm', 'ham'): (hamming, False), - ('hanning', 'hann', 'han'): (hann, False), -} - -# Fill dict with all valid window name strings -_win_equiv = {} -for k, v in _win_equiv_raw.items(): - for key in k: - _win_equiv[key] = v[0] - -# Keep track of which windows need additional parameters -_needs_param = set() -for k, v in _win_equiv_raw.items(): - if v[1]: - _needs_param.update(k) - - -def get_window(window, Nx, fftbins=True): - """ - Return a window. - - Parameters - ---------- - window : string, float, or tuple - The type of window to create. See below for more details. - Nx : int - The number of samples in the window. - fftbins : bool, optional - If True (default), create a "periodic" window, ready to use with - `ifftshift` and be multiplied by the result of an FFT (see also - `fftpack.fftfreq`). - If False, create a "symmetric" window, for use in filter design. - """ - sym = not fftbins - try: - beta = float(window) - except (TypeError, ValueError): - args = () - if isinstance(window, tuple): - winstr = window[0] - if len(window) > 1: - args = window[1:] - elif isinstance(window, string_types): - if window in _needs_param: - raise ValueError("The '" + window + "' window needs one or " - "more parameters -- pass a tuple.") - else: - winstr = window - else: - raise ValueError("%s as window type is not supported." % - str(type(window))) - - try: - winfunc = _win_equiv[winstr] - except KeyError: - raise ValueError("Unknown window type.") - - params = (Nx,) + args + (sym,) - else: - winfunc = kaiser - params = (Nx, beta, sym) - - return winfunc(*params) - - - - - - -# --------------------------------------------------------------------------------} -# --- Helpers -# --------------------------------------------------------------------------------{ -def odd_ext(x, n, axis=-1): - """ - Odd extension at the boundaries of an array - Generate a new ndarray by making an odd extension of `x` along an axis. - """ - if n < 1: - return x - if n > x.shape[axis] - 1: - raise ValueError(("The extension length n (%d) is too big. 
" + - "It must not exceed x.shape[axis]-1, which is %d.") - % (n, x.shape[axis] - 1)) - left_end = axis_slice(x, start=0, stop=1, axis=axis) - left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis) - right_end = axis_slice(x, start=-1, axis=axis) - right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis) - ext = np.concatenate((2 * left_end - left_ext, - x, - 2 * right_end - right_ext), - axis=axis) - return ext - - -def even_ext(x, n, axis=-1): - """ - Even extension at the boundaries of an array - Generate a new ndarray by making an even extension of `x` along an axis. - """ - if n < 1: - return x - if n > x.shape[axis] - 1: - raise ValueError(("The extension length n (%d) is too big. " + - "It must not exceed x.shape[axis]-1, which is %d.") - % (n, x.shape[axis] - 1)) - left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis) - right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis) - ext = np.concatenate((left_ext, - x, - right_ext), - axis=axis) - return ext - - -def const_ext(x, n, axis=-1): - """ - Constant extension at the boundaries of an array - Generate a new ndarray that is a constant extension of `x` along an axis. - The extension repeats the values at the first and last element of - the axis. - """ - if n < 1: - return x - left_end = axis_slice(x, start=0, stop=1, axis=axis) - ones_shape = [1] * x.ndim - ones_shape[axis] = n - ones = np.ones(ones_shape, dtype=x.dtype) - left_ext = ones * left_end - right_end = axis_slice(x, start=-1, axis=axis) - right_ext = ones * right_end - ext = np.concatenate((left_ext, - x, - right_ext), - axis=axis) - return ext - - -def zero_ext(x, n, axis=-1): - """ - Zero padding at the boundaries of an array - Generate a new ndarray that is a zero padded extension of `x` along - an axis. - """ - if n < 1: - return x - zeros_shape = list(x.shape) - zeros_shape[axis] = n - zeros = np.zeros(zeros_shape, dtype=x.dtype) - ext = np.concatenate((zeros, x, zeros), axis=axis) - return ext - -def signaltools_detrend(data, axis=-1, type='linear', bp=0): - """ - Remove linear trend along axis from data. - - Parameters - ---------- - data : array_like - The input data. - axis : int, optional - The axis along which to detrend the data. By default this is the - last axis (-1). - type : {'linear', 'constant'}, optional - The type of detrending. If ``type == 'linear'`` (default), - the result of a linear least-squares fit to `data` is subtracted - from `data`. - If ``type == 'constant'``, only the mean of `data` is subtracted. - bp : array_like of ints, optional - A sequence of break points. If given, an individual linear fit is - performed for each part of `data` between two break points. - Break points are specified as indices into `data`. - - Returns - ------- - ret : ndarray - The detrended input data. 
- """ - if type not in ['linear', 'l', 'constant', 'c']: - raise ValueError("Trend type must be 'linear' or 'constant'.") - data = np.asarray(data) - dtype = data.dtype.char - if dtype not in 'dfDF': - dtype = 'd' - if type in ['constant', 'c']: - #print('Removing mean') - ret = data - np.expand_dims(np.mean(data, axis), axis) - return ret - else: - #print('Removing linear?') - dshape = data.shape - N = dshape[axis] - bp = sort(unique(r_[0, bp, N])) - if np.any(bp > N): - raise ValueError("Breakpoints must be less than length " - "of data along given axis.") - Nreg = len(bp) - 1 - # Restructure data so that axis is along first dimension and - # all other dimensions are collapsed into second dimension - rnk = len(dshape) - if axis < 0: - axis = axis + rnk - newdims = r_[axis, 0:axis, axis + 1:rnk] - newdata = reshape(np.transpose(data, tuple(newdims)), - (N, _prod(dshape) // N)) - newdata = newdata.copy() # make sure we have a copy - if newdata.dtype.char not in 'dfDF': - newdata = newdata.astype(dtype) - # Find leastsq fit and remove it for each piece - for m in range(Nreg): - Npts = bp[m + 1] - bp[m] - A = ones((Npts, 2), dtype) - A[:, 0] = cast[dtype](np.arange(1, Npts + 1) * 1.0 / Npts) - sl = slice(bp[m], bp[m + 1]) - coef, resids, rank, s = np.linalg.lstsq(A, newdata[sl]) - newdata[sl] = newdata[sl] - dot(A, coef) - # Put data back in original shape. - tdshape = take(dshape, newdims, 0) - ret = np.reshape(newdata, tuple(tdshape)) - vals = list(range(1, rnk)) - olddims = vals[:axis] + [0] + vals[axis:] - ret = np.transpose(ret, tuple(olddims)) - return ret - - - -# --------------------------------------------------------------------------------} -# --- Spectral Averaging -# --------------------------------------------------------------------------------{ -"""Tools for spectral analysis. """ - -def welch(x, fs=1.0, window='hann', nperseg=None, noverlap=None, nfft=None, - detrend='constant', return_onesided=True, scaling='density', - axis=-1): - """Interface identical to scipy.signal """ - - if detrend==True: - detrend='constant' - - freqs, Pxx = csd(x, x, fs, window, nperseg, noverlap, nfft, detrend, return_onesided, scaling, axis) - return freqs, Pxx.real - -#>>>> -def pwelch(x, window='hamming', noverlap=None, nfft=None, fs=1.0, nperseg=None, - detrend=False, return_onesided=True, scaling='density', - axis=-1): - r""" - NOTE: interface and default options modified to match matlab's implementation - >> detrend: default to False - >> window : default to 'hamming' - >> window: if an integer, use 'hamming(window, sym=True)' - - - Estimate power spectral density using Welch's method. - - Welch's method [1]_ computes an estimate of the power spectral - density by dividing the data into overlapping segments, computing a - modified periodogram for each segment and averaging the - periodograms. - - Parameters - ---------- - x : array_like - Time series of measurement values - fs : float, optional - Sampling frequency of the `x` time series. Defaults to 1.0. - window : str or tuple or array_like, optional - Desired window to use. If `window` is a string or tuple, it is - passed to `get_window` to generate the window values, which are - DFT-even by default. See `get_window` for a list of windows and - required parameters. If `window` is array_like it will be used - directly as the window and its length must be nperseg. Defaults - to a Hann window. - nperseg : int, optional - Length of each segment. 
Defaults to None, but if window is str or - tuple, is set to 256, and if window is array_like, is set to the - length of the window. - noverlap : int, optional - Number of points to overlap between segments. If `None`, - ``noverlap = nperseg // 2``. Defaults to `None`. - nfft : int, optional - Length of the FFT used, if a zero padded FFT is desired. If - `None`, the FFT length is `nperseg`. Defaults to `None`. - detrend : str or function or `False`, optional - Specifies how to detrend each segment. If `detrend` is a - string, it is passed as the `type` argument to the `detrend` - function. If it is a function, it takes a segment and returns a - detrended segment. If `detrend` is `False`, no detrending is - done. Defaults to 'constant'. - return_onesided : bool, optional - If `True`, return a one-sided spectrum for real data. If - `False` return a two-sided spectrum. Note that for complex - data, a two-sided spectrum is always returned. - scaling : { 'density', 'spectrum' }, optional - Selects between computing the power spectral density ('density') - where `Pxx` has units of V**2/Hz and computing the power - spectrum ('spectrum') where `Pxx` has units of V**2, if `x` - is measured in V and `fs` is measured in Hz. Defaults to - 'density' - axis : int, optional - Axis along which the periodogram is computed; the default is - over the last axis (i.e. ``axis=-1``). - - Returns - ------- - f : ndarray - Array of sample frequencies. - Pxx : ndarray - Power spectral density or power spectrum of x. - - See Also - -------- - periodogram: Simple, optionally modified periodogram - lombscargle: Lomb-Scargle periodogram for unevenly sampled data - - Notes - ----- - An appropriate amount of overlap will depend on the choice of window - and on your requirements. For the default Hann window an overlap of - 50% is a reasonable trade off between accurately estimating the - signal power, while not over counting any of the data. Narrower - windows may require a larger overlap. - - If `noverlap` is 0, this method is equivalent to Bartlett's method - [2]_. - - .. versionadded:: 0.12.0 - - References - ---------- - .. [1] P. Welch, "The use of the fast Fourier transform for the - estimation of power spectra: A method based on time averaging - over short, modified periodograms", IEEE Trans. Audio - Electroacoust. vol. 15, pp. 70-73, 1967. - .. [2] M.S. Bartlett, "Periodogram Analysis and Continuous Spectra", - Biometrika, vol. 37, pp. 1-16, 1950. 
- - """ - import math - def fnextpow2(x): - return 2**math.ceil( math.log(x)*0.99999999999/math.log(2)); - - # MANU >>> CHANGE OF DEFAULT OPTIONS - # MANU - If a length is provided use symmetric hamming window - if type(window)==int: - window=hamming(window, True) - # MANU - do not use 256 as default - if isinstance(window, string_types) or isinstance(window, tuple): - if nperseg is None: - if noverlap is None: - overlap_frac=0.5 - elif noverlap == 0: - overlap_frac=0 - else: - raise NotImplementedError('TODO noverlap set but not nperseg') - #nperseg = 256 # then change to default - nperseg=fnextpow2(math.sqrt(x.shape[-1]/(1-overlap_frac))); - - # MANU accepting true as detrend - if detrend==True: - detrend='constant' - - freqs, Pxx, Info = csd(x, x, fs, window, nperseg, noverlap, nfft, detrend, - return_onesided, scaling, axis) - - return freqs, Pxx.real, Info - - -def csd(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, nfft=None, - detrend='constant', return_onesided=True, scaling='density', axis=-1): - r""" - Estimate the cross power spectral density, Pxy, using Welch's - method. - """ - - freqs, _, Pxy, Info = _spectral_helper(x, y, fs, window, nperseg, noverlap, nfft, - detrend, return_onesided, scaling, axis, - mode='psd') - - # Average over windows. - if len(Pxy.shape) >= 2 and Pxy.size > 0: - if Pxy.shape[-1] > 1: - Pxy = Pxy.mean(axis=-1) - else: - Pxy = np.reshape(Pxy, Pxy.shape[:-1]) - - return freqs, Pxy, Info - - - -def coherence(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, - nfft=None, detrend='constant', axis=-1): - r""" - Estimate the magnitude squared coherence estimate, Cxy, of - discrete-time signals X and Y using Welch's method. - - ``Cxy = abs(Pxy)**2/(Pxx*Pyy)``, where `Pxx` and `Pyy` are power - spectral density estimates of X and Y, and `Pxy` is the cross - spectral density estimate of X and Y. - """ - - freqs, Pxx, Infoxx = welch(x, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) - _, Pyy, Infoyy = welch(y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) - _, Pxy, Infoxy = csd(x, y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) - - Cxy = np.abs(Pxy)**2 / Pxx / Pyy - - return freqs, Cxy, Infoxx - - -def _spectral_helper(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, - nfft=None, detrend='constant', return_onesided=True, - scaling='spectrum', axis=-1, mode='psd', boundary=None, - padded=False): - """ Calculate various forms of windowed FFTs for PSD, CSD, etc. """ - if mode not in ['psd', 'stft']: - raise ValueError("Unknown value for mode %s, must be one of: " - "{'psd', 'stft'}" % mode) - - - - - - boundary_funcs = {'even': even_ext, - 'odd': odd_ext, - 'constant': const_ext, - 'zeros': zero_ext, - None: None} - - if boundary not in boundary_funcs: - raise ValueError("Unknown boundary option '{0}', must be one of: {1}" - .format(boundary, list(boundary_funcs.keys()))) - - # If x and y are the same object we can save ourselves some computation. 
- same_data = y is x - - if not same_data and mode != 'psd': - raise ValueError("x and y must be equal if mode is 'stft'") - - axis = int(axis) - - # Ensure we have np.arrays, get outdtype - x = np.asarray(x) - if not same_data: - y = np.asarray(y) - outdtype = np.result_type(x, y, np.complex64) - else: - outdtype = np.result_type(x, np.complex64) - - if not same_data: - # Check if we can broadcast the outer axes together - xouter = list(x.shape) - youter = list(y.shape) - xouter.pop(axis) - youter.pop(axis) - try: - outershape = np.broadcast(np.empty(xouter), np.empty(youter)).shape - except ValueError: - raise ValueError('x and y cannot be broadcast together.') - - if same_data: - if x.size == 0: - return np.empty(x.shape), np.empty(x.shape), np.empty(x.shape) - else: - if x.size == 0 or y.size == 0: - outshape = outershape + (min([x.shape[axis], y.shape[axis]]),) - emptyout = np.rollaxis(np.empty(outshape), -1, axis) - return emptyout, emptyout, emptyout - - if x.ndim > 1: - if axis != -1: - x = np.rollaxis(x, axis, len(x.shape)) - if not same_data and y.ndim > 1: - y = np.rollaxis(y, axis, len(y.shape)) - - # Check if x and y are the same length, zero-pad if necessary - if not same_data: - if x.shape[-1] != y.shape[-1]: - if x.shape[-1] < y.shape[-1]: - pad_shape = list(x.shape) - pad_shape[-1] = y.shape[-1] - x.shape[-1] - x = np.concatenate((x, np.zeros(pad_shape)), -1) - else: - pad_shape = list(y.shape) - pad_shape[-1] = x.shape[-1] - y.shape[-1] - y = np.concatenate((y, np.zeros(pad_shape)), -1) - - if nperseg is not None: # if specified by user - nperseg = int(nperseg) - if nperseg < 1: - raise ValueError('nperseg must be a positive integer') - - # parse window; if array like, then set nperseg = win.shape - win, nperseg = _triage_segments(window, nperseg,input_length=x.shape[-1]) - - if nfft is None: - nfft = nperseg - elif nfft < nperseg: - raise ValueError('nfft must be greater than or equal to nperseg.') - else: - nfft = int(nfft) - - if noverlap is None: - noverlap = nperseg//2 - else: - noverlap = int(noverlap) - if noverlap >= nperseg: - raise ValueError('noverlap must be less than nperseg.') - nstep = nperseg - noverlap - - # Padding occurs after boundary extension, so that the extended signal ends - # in zeros, instead of introducing an impulse at the end. - # I.e. if x = [..., 3, 2] - # extend then pad -> [..., 3, 2, 2, 3, 0, 0, 0] - # pad then extend -> [..., 3, 2, 0, 0, 0, 2, 3] - - if boundary is not None: - ext_func = boundary_funcs[boundary] - x = ext_func(x, nperseg//2, axis=-1) - if not same_data: - y = ext_func(y, nperseg//2, axis=-1) - - if padded: - # Pad to integer number of windowed segments - # I.e make x.shape[-1] = nperseg + (nseg-1)*nstep, with integer nseg - nadd = (-(x.shape[-1]-nperseg) % nstep) % nperseg - zeros_shape = list(x.shape[:-1]) + [nadd] - x = np.concatenate((x, np.zeros(zeros_shape)), axis=-1) - if not same_data: - zeros_shape = list(y.shape[:-1]) + [nadd] - y = np.concatenate((y, np.zeros(zeros_shape)), axis=-1) - - # Handle detrending and window functions - if not detrend: - def detrend_func(d): - return d - elif not hasattr(detrend, '__call__'): - def detrend_func(d): - return signaltools_detrend(d, type=detrend, axis=-1) - elif axis != -1: - # Wrap this function so that it receives a shape that it could - # reasonably expect to receive. 
- def detrend_func(d): - d = np.rollaxis(d, -1, axis) - d = detrend(d) - return np.rollaxis(d, axis, len(d.shape)) - else: - detrend_func = detrend - - if np.result_type(win,np.complex64) != outdtype: - win = win.astype(outdtype) - - if scaling == 'density': - scale = 1.0 / (fs * (win*win).sum()) - elif scaling == 'spectrum': - scale = 1.0 / win.sum()**2 - else: - raise ValueError('Unknown scaling: %r' % scaling) - - if mode == 'stft': - scale = np.sqrt(scale) - - if return_onesided: - if np.iscomplexobj(x): - sides = 'twosided' - #warnings.warn('Input data is complex, switching to ' 'return_onesided=False') - else: - sides = 'onesided' - if not same_data: - if np.iscomplexobj(y): - sides = 'twosided' - #warnings.warn('Input data is complex, switching to return_onesided=False') - else: - sides = 'twosided' - - if sides == 'twosided': - raise Exception('NOT IMPLEMENTED') - #freqs = fftpack.fftfreq(nfft, 1/fs) - elif sides == 'onesided': - freqs = np.fft.rfftfreq(nfft, 1/fs) - - # Perform the windowed FFTs - result = _fft_helper(x, win, detrend_func, nperseg, noverlap, nfft, sides) - - if not same_data: - # All the same operations on the y data - result_y = _fft_helper(y, win, detrend_func, nperseg, noverlap, nfft, - sides) - result = np.conjugate(result) * result_y - elif mode == 'psd': - result = np.conjugate(result) * result - - result *= scale - if sides == 'onesided' and mode == 'psd': - if nfft % 2: - result[..., 1:] *= 2 - else: - # Last point is unpaired Nyquist freq point, don't double - result[..., 1:-1] *= 2 - - time = np.arange(nperseg/2, x.shape[-1] - nperseg/2 + 1, - nperseg - noverlap)/float(fs) - if boundary is not None: - time -= (nperseg/2) / fs - - result = result.astype(outdtype) - - # All imaginary parts are zero anyways - if same_data and mode != 'stft': - result = result.real - - # Output is going to have new last axis for time/window index, so a - # negative axis index shifts down one - if axis < 0: - axis -= 1 - - # Roll frequency axis back to axis where the data came from - result = np.rollaxis(result, -1, axis) - - # TODO - class InfoClass(): - pass - Info = InfoClass(); - Info.df=freqs[1]-freqs[0] - Info.fMax=freqs[-1] - Info.LFreq=len(freqs) - Info.LSeg=nperseg - Info.LWin=len(win) - Info.LOvlp=noverlap - Info.nFFT=nfft - Info.nseg=-1 - #print('df:{:.3f} - fm:{:.2f} - nseg:{} - Lf:{:5d} - Lseg:{:5d} - Lwin:{:5d} - Lovlp:{:5d} - Nfft:{:5d} - Lsig:{}'.format(freqs[1]-freqs[0],freqs[-1],-1,len(freqs),nperseg,len(win),noverlap,nfft,x.shape[-1])) - return freqs, time, result, Info - - -def _fft_helper(x, win, detrend_func, nperseg, noverlap, nfft, sides): - """ Calculate windowed FFT """ - # Created strided array of data segments - if nperseg == 1 and noverlap == 0: - result = x[..., np.newaxis] - else: - # http://stackoverflow.com/a/5568169 - step = nperseg - noverlap - shape = x.shape[:-1]+((x.shape[-1]-noverlap)//step, nperseg) - strides = x.strides[:-1]+(step*x.strides[-1], x.strides[-1]) - result = np.lib.stride_tricks.as_strided(x, shape=shape, - strides=strides) - - # Detrend each data segment individually - result = detrend_func(result) - - # Apply window by multiplication - result = win * result - - # Perform the fft. Acts on last axis by default. 
Zero-pads automatically - if sides == 'twosided': - raise Exception('NOT IMPLEMENTED') - #func = fftpack.fft - else: - result = result.real - func = np.fft.rfft - result = func(result, n=nfft) - - return result - -def _triage_segments(window, nperseg,input_length): - """ - Parses window and nperseg arguments for spectrogram and _spectral_helper. - This is a helper function, not meant to be called externally. - """ - - #parse window; if array like, then set nperseg = win.shape - if isinstance(window, string_types) or isinstance(window, tuple): - # if nperseg not specified - if nperseg is None: - nperseg = 256 # then change to default - if nperseg > input_length: - print('nperseg = {0:d} is greater than input length ' - ' = {1:d}, using nperseg = {1:d}' - .format(nperseg, input_length)) - nperseg = input_length - win = get_window(window, nperseg) - else: - win = np.asarray(window) - if len(win.shape) != 1: - raise ValueError('window must be 1-D') - if input_length < win.shape[-1]: - raise ValueError('window is longer than input signal') - if nperseg is None: - nperseg = win.shape[0] - elif nperseg is not None: - if nperseg != win.shape[0]: - raise ValueError("value specified for nperseg is different from" - " length of window") - - return win, nperseg - - - - - - -# --------------------------------------------------------------------------------} -# --- Unittests -# --------------------------------------------------------------------------------{ -import unittest - -class TestSpectral(unittest.TestCase): - - def test_fft_amplitude(self): - dt=0.1 - t=np.arange(0,10,dt); - f0=1; - A=5; - y=A*np.sin(2*np.pi*f0*t) - f,Y,_=fft_amplitude(y,fs=1/dt,detrend=False) - i=np.argmax(Y) - self.assertAlmostEqual(Y[i],A) - self.assertAlmostEqual(f[i],f0) - -if __name__ == '__main__': - unittest.main() - +# Tools for spectral analysis of a real valued signal. +# +# The functions in this file were adapted from the python package scipy according to the following license: +# +# License: +# Copyright 2001, 2002 Enthought, Inc. +# All rights reserved. +# +# Copyright 2003-2013 SciPy Developers. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: +# +# Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +# Neither the name of Enthought nor the names of the SciPy Developers may be used to endorse or promote products derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from __future__ import division, print_function, absolute_import + +import numpy as np +import pandas as pd +from six import string_types + +__all__ = ['fft_wrap','welch', 'psd', 'fft_amplitude'] +__all__ += ['pwelch', 'csd', 'coherence'] +__all__ += ['fnextpow2'] +__all__ += ['hann','hamming','boxcar','general_hamming','get_window'] +__all__ += ['TestSpectral'] + + +# --------------------------------------------------------------------------------} +# --- FFT wrap +# --------------------------------------------------------------------------------{ +def fft_wrap(t,y,dt=None, output_type='amplitude',averaging='None',averaging_window='hamming',detrend=False,nExp=None, nPerDecade=None): + """ + Wrapper to compute FFT amplitude or power spectra, with averaging. + INPUTS: + output_type : amplitude, PSD, f x PSD + averaging : None, Welch, Binning + averaging_window : Hamming, Hann, Rectangular + OUTPUTS: + frq: vector of frequencies + Y : Amplitude spectrum, PSD, or f * PSD + Info: a dictionary of info values + """ + + # Formatting inputs + output_type = output_type.lower() + averaging = averaging.lower() + averaging_window = averaging_window.lower() + y = np.asarray(y) + y = y[~np.isnan(y)] + n = len(y) + + if dt is None: + dtDelta0 = t[1]-t[0] + # Hack to use a constant dt + dt = (np.max(t)-np.min(t))/(n-1) + if dtDelta0 !=dt: + print('[WARN] dt from tmax-tmin different from dt from t2-t1' ) + Fs = 1/dt + if averaging =='none': + frq, PSD, Info = psd(y, fs=Fs, detrend=detrend, return_onesided=True) + elif averaging =='binning': + frq, PSD, Info = psd_binned(y, fs=Fs, detrend=detrend, return_onesided=True, nPerDecade=nPerDecade) + elif averaging=='welch': + # --- Welch - PSD + #overlap_frac=0.5 + #return fnextpow2(np.sqrt(len(x)/(1-overlap_frac))) + nFFTAll=fnextpow2(n) + if nExp is None: + nExp=int(np.log(nFFTAll)/np.log(2))-1 + nPerSeg=2**nExp + if nPerSeg>n: + print('[WARN] Power of 2 value was too high and was reduced. 
Disable averaging to use the full spectrum.'); + nExp=int(np.log(nFFTAll)/np.log(2))-1 + nPerSeg=2**nExp + if averaging_window=='hamming': + window = hamming(nPerSeg, True)# True=Symmetric, like matlab + elif averaging_window=='hann': + window = hann(nPerSeg, True) + elif averaging_window=='rectangular': + window = boxcar(nPerSeg) + else: + raise Exception('Averaging window unknown {}'.format(averaging_window)) + frq, PSD, Info = pwelch(y, fs=Fs, window=window, detrend=detrend) + Info.nExp = nExp + else: + raise Exception('Averaging method unknown {}'.format(averaging)) + + # --- Formatting output + if output_type=='amplitude': + deltaf = frq[1]-frq[0] + Y = np.sqrt(PSD*2*deltaf) + # NOTE: the above should be the same as:Y=abs(Y[range(nhalf)])/n;Y[1:-1]=Y[1:-1]*2; + elif output_type=='psd': # one sided + Y = PSD + elif output_type=='f x psd': + Y = PSD*frq + else: + raise NotImplementedError('Contact developer') + if detrend: + frq= frq[1:] + Y = Y[1:] + return frq, Y, Info + + + +# --------------------------------------------------------------------------------} +# --- Spectral simple (averaging below) +# --------------------------------------------------------------------------------{ +def fft_amplitude(y, fs=1.0, detrend ='constant', return_onesided=True): + """ Returns FFT amplitude of signal """ + frq, PSD, Info = psd(y, fs=fs, detrend=detrend, return_onesided=return_onesided) + deltaf = frq[1]-frq[0] + Y = np.sqrt(PSD*2*deltaf) + return frq, Y, Info + + +def psd_binned(y, fs=1.0, nPerDecade=10, detrend ='constant', return_onesided=True): + """ + Return PSD binned with nPoints per decade + """ + # --- First return regular PSD + frq, PSD, Info = psd(y, fs=fs, detrend=detrend, return_onesided=return_onesided) + + add0=False + if frq[0]==0: + add0=True + f0 = 0 + PSD0 = PSD[0] + frq=frq[1:] + PSD=PSD[1:] + + # -- Then bin per decase + log_f = np.log10(frq) + ndecades = np.ceil(log_f[-1] -log_f[0]) + xbins = np.linspace(log_f[0], log_f[-1], int(ndecades*nPerDecade)) + + # Using Pandas to bin.. 
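    # The binning below works on log10(frequency):
    #   - pd.cut tags each log10(f) with the bin it falls into, the bins being
    #     `nPerDecade` per decade and labelled by their midpoints (xmid);
    #   - groupby('Bin').mean() averages log10(f) and PSD inside each bin;
    #   - reindex(xmid) keeps empty bins as NaN, which are dropped after
    #     converting back to frequency with 10**log_f_bin;
    #   - the DC value (f=0), if present, was set aside above and is re-prepended.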
+ df = pd.DataFrame(data=np.column_stack((log_f,PSD)), columns=['x','y']) + xmid = (xbins[:-1]+xbins[1:])/2 + df['Bin'] = pd.cut(df['x'], bins=xbins, labels=xmid ) # Adding a column that has bin attribute + df2 = df.groupby('Bin').mean() # Average by bin + df2 = df2.reindex(xmid) + log_f_bin = df2['x'].values + PSD_bin = df2['y'].values + frq2= 10**log_f_bin + PSD2= PSD_bin + if add0: + frq2=np.concatenate( ([f0 ], frq2) ) + PSD2=np.concatenate( ([PSD0], PSD2) ) + b = ~np.isnan(frq2) + frq2 = frq2[b] + PSD2 = PSD2[b] + + #import matplotlib.pyplot as plt + #fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) + #fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) + #ax.plot(log_f, PSD, label='') + #ax.plot(log_f_bin, PSD_bin, 'o', label='') + #for x in xbins: + # ax.axvline(x, ls=':', c=(0.5,0.5,0.5)) + #ax.set_xlabel('') + #ax.set_ylabel('') + #ax.legend() + #plt.show() + + #Info.df = frq[1]-frq[0] + #Info.fMax = frq[-1] + #Info.LFreq = len(frq) + #Info.LSeg = len(Y) + #Info.LWin = len(Y) + #Info.LOvlp = 0 + #Info.nFFT = len(Y) + #Info.nseg = 1 + Info.nPerDecade = nPerDecade + Info.xbins = xbins + + return frq2, PSD2, Info + + +def psd(y, fs=1.0, detrend ='constant', return_onesided=True): + """ Perform PSD without averaging """ + if not return_onesided: + raise NotImplementedError('Double sided todo') + + if detrend is None: + detrend=False + + if detrend=='constant' or detrend==True: + m=np.mean(y); + else: + m=0; + + n = len(y) + if n%2==0: + nhalf = int(n/2+1) + else: + nhalf = int((n+1)/2) + + frq = np.arange(nhalf)*fs/n; + Y = np.fft.rfft(y-m) #Y = np.fft.fft(y) + PSD = abs(Y[range(nhalf)])**2 /(n*fs) # PSD + PSD[1:-1] = PSD[1:-1]*2; + class InfoClass(): + pass + Info = InfoClass(); + Info.df = frq[1]-frq[0] + Info.fMax = frq[-1] + Info.LFreq = len(frq) + Info.LSeg = len(Y) + Info.LWin = len(Y) + Info.LOvlp = 0 + Info.nFFT = len(Y) + Info.nseg = 1 + return frq, PSD, Info + + +# --------------------------------------------------------------------------------} +# --- Windows +# --------------------------------------------------------------------------------{ +"""The suite of window functions.""" +def fnextpow2(x): + return 2**np.ceil( np.log(x)*0.99999999999/np.log(2)); + +def fDefaultWinLen(x,overlap_frac=0.5): + return fnextpow2(np.sqrt(len(x)/(1-overlap_frac))) + +def fDefaultWinLenMatlab(x): + return np.fix((len(x)-3)*2./9.) + +def _len_guards(M): + """Handle small or incorrect window lengths""" + if int(M) != M or M < 0: + raise ValueError('Window length M must be a non-negative integer') + return M <= 1 + +def _extend(M, sym): + """Extend window by 1 sample if needed for DFT-even symmetry""" + if not sym: + return M + 1, True + else: + return M, False + +def _truncate(w, needed): + """Truncate window by 1 sample if needed for DFT-even symmetry""" + if needed: + return w[:-1] + else: + return w + +def general_cosine(M, a, sym=True): + if _len_guards(M): + return np.ones(M) + M, needs_trunc = _extend(M, sym) + + fac = np.linspace(-np.pi, np.pi, M) + w = np.zeros(M) + for k in range(len(a)): + w += a[k] * np.cos(k * fac) + + return _truncate(w, needs_trunc) + + +def boxcar(M, sym=True): + """Return a boxcar or rectangular window. + + Also known as a rectangular window or Dirichlet window, this is equivalent + to no window at all. 
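    Examples
    --------
    Illustrative sketch (added; not part of the original scipy docstring):

    >>> w  = boxcar(8)                  # eight ones
    >>> w2 = get_window('rect', 8)      # same result via the name lookup below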
+ """ + if _len_guards(M): + return np.ones(M) + M, needs_trunc = _extend(M, sym) + + w = np.ones(M, float) + + return _truncate(w, needs_trunc) + +def hann(M, sym=True): # same as hanning(*args, **kwargs): + return general_hamming(M, 0.5, sym) + + +def general_hamming(M, alpha, sym=True): + r"""Return a generalized Hamming window. + The generalized Hamming window is constructed by multiplying a rectangular + window by one period of a cosine function [1]_. + w(n) = \alpha - \left(1 - \alpha\right) \cos\left(\frac{2\pi{n}}{M-1}\right) + \qquad 0 \leq n \leq M-1 + """ + return general_cosine(M, [alpha, 1. - alpha], sym) + + +def hamming(M, sym=True): + r"""Return a Hamming window. + The Hamming window is a taper formed by using a raised cosine with + non-zero endpoints, optimized to minimize the nearest side lobe. + w(n) = 0.54 - 0.46 \cos\left(\frac{2\pi{n}}{M-1}\right) + \qquad 0 \leq n \leq M-1 + """ + return general_hamming(M, 0.54, sym) + +_win_equiv_raw = { + ('boxcar', 'box', 'ones', 'rect', 'rectangular'): (boxcar, False), + ('hamming', 'hamm', 'ham'): (hamming, False), + ('hanning', 'hann', 'han'): (hann, False), +} + +# Fill dict with all valid window name strings +_win_equiv = {} +for k, v in _win_equiv_raw.items(): + for key in k: + _win_equiv[key] = v[0] + +# Keep track of which windows need additional parameters +_needs_param = set() +for k, v in _win_equiv_raw.items(): + if v[1]: + _needs_param.update(k) + + +def get_window(window, Nx, fftbins=True): + """ + Return a window. + + Parameters + ---------- + window : string, float, or tuple + The type of window to create. See below for more details. + Nx : int + The number of samples in the window. + fftbins : bool, optional + If True (default), create a "periodic" window, ready to use with + `ifftshift` and be multiplied by the result of an FFT (see also + `fftpack.fftfreq`). + If False, create a "symmetric" window, for use in filter design. + """ + sym = not fftbins + try: + beta = float(window) + except (TypeError, ValueError): + args = () + if isinstance(window, tuple): + winstr = window[0] + if len(window) > 1: + args = window[1:] + elif isinstance(window, string_types): + if window in _needs_param: + raise ValueError("The '" + window + "' window needs one or " + "more parameters -- pass a tuple.") + else: + winstr = window + else: + raise ValueError("%s as window type is not supported." % + str(type(window))) + + try: + winfunc = _win_equiv[winstr] + except KeyError: + raise ValueError("Unknown window type.") + + params = (Nx,) + args + (sym,) + else: + winfunc = kaiser + params = (Nx, beta, sym) + + return winfunc(*params) + + + + + + +# --------------------------------------------------------------------------------} +# --- Helpers +# --------------------------------------------------------------------------------{ +def odd_ext(x, n, axis=-1): + """ + Odd extension at the boundaries of an array + Generate a new ndarray by making an odd extension of `x` along an axis. + """ + if n < 1: + return x + if n > x.shape[axis] - 1: + raise ValueError(("The extension length n (%d) is too big. 
" + + "It must not exceed x.shape[axis]-1, which is %d.") + % (n, x.shape[axis] - 1)) + left_end = axis_slice(x, start=0, stop=1, axis=axis) + left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis) + right_end = axis_slice(x, start=-1, axis=axis) + right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis) + ext = np.concatenate((2 * left_end - left_ext, + x, + 2 * right_end - right_ext), + axis=axis) + return ext + + +def even_ext(x, n, axis=-1): + """ + Even extension at the boundaries of an array + Generate a new ndarray by making an even extension of `x` along an axis. + """ + if n < 1: + return x + if n > x.shape[axis] - 1: + raise ValueError(("The extension length n (%d) is too big. " + + "It must not exceed x.shape[axis]-1, which is %d.") + % (n, x.shape[axis] - 1)) + left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis) + right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis) + ext = np.concatenate((left_ext, + x, + right_ext), + axis=axis) + return ext + + +def const_ext(x, n, axis=-1): + """ + Constant extension at the boundaries of an array + Generate a new ndarray that is a constant extension of `x` along an axis. + The extension repeats the values at the first and last element of + the axis. + """ + if n < 1: + return x + left_end = axis_slice(x, start=0, stop=1, axis=axis) + ones_shape = [1] * x.ndim + ones_shape[axis] = n + ones = np.ones(ones_shape, dtype=x.dtype) + left_ext = ones * left_end + right_end = axis_slice(x, start=-1, axis=axis) + right_ext = ones * right_end + ext = np.concatenate((left_ext, + x, + right_ext), + axis=axis) + return ext + + +def zero_ext(x, n, axis=-1): + """ + Zero padding at the boundaries of an array + Generate a new ndarray that is a zero padded extension of `x` along + an axis. + """ + if n < 1: + return x + zeros_shape = list(x.shape) + zeros_shape[axis] = n + zeros = np.zeros(zeros_shape, dtype=x.dtype) + ext = np.concatenate((zeros, x, zeros), axis=axis) + return ext + +def signaltools_detrend(data, axis=-1, type='linear', bp=0): + """ + Remove linear trend along axis from data. + + Parameters + ---------- + data : array_like + The input data. + axis : int, optional + The axis along which to detrend the data. By default this is the + last axis (-1). + type : {'linear', 'constant'}, optional + The type of detrending. If ``type == 'linear'`` (default), + the result of a linear least-squares fit to `data` is subtracted + from `data`. + If ``type == 'constant'``, only the mean of `data` is subtracted. + bp : array_like of ints, optional + A sequence of break points. If given, an individual linear fit is + performed for each part of `data` between two break points. + Break points are specified as indices into `data`. + + Returns + ------- + ret : ndarray + The detrended input data. 
+ """ + if type not in ['linear', 'l', 'constant', 'c']: + raise ValueError("Trend type must be 'linear' or 'constant'.") + data = np.asarray(data) + dtype = data.dtype.char + if dtype not in 'dfDF': + dtype = 'd' + if type in ['constant', 'c']: + #print('Removing mean') + ret = data - np.expand_dims(np.mean(data, axis), axis) + return ret + else: + #print('Removing linear?') + dshape = data.shape + N = dshape[axis] + bp = sort(unique(r_[0, bp, N])) + if np.any(bp > N): + raise ValueError("Breakpoints must be less than length " + "of data along given axis.") + Nreg = len(bp) - 1 + # Restructure data so that axis is along first dimension and + # all other dimensions are collapsed into second dimension + rnk = len(dshape) + if axis < 0: + axis = axis + rnk + newdims = r_[axis, 0:axis, axis + 1:rnk] + newdata = reshape(np.transpose(data, tuple(newdims)), + (N, _prod(dshape) // N)) + newdata = newdata.copy() # make sure we have a copy + if newdata.dtype.char not in 'dfDF': + newdata = newdata.astype(dtype) + # Find leastsq fit and remove it for each piece + for m in range(Nreg): + Npts = bp[m + 1] - bp[m] + A = ones((Npts, 2), dtype) + A[:, 0] = cast[dtype](np.arange(1, Npts + 1) * 1.0 / Npts) + sl = slice(bp[m], bp[m + 1]) + coef, resids, rank, s = np.linalg.lstsq(A, newdata[sl]) + newdata[sl] = newdata[sl] - dot(A, coef) + # Put data back in original shape. + tdshape = take(dshape, newdims, 0) + ret = np.reshape(newdata, tuple(tdshape)) + vals = list(range(1, rnk)) + olddims = vals[:axis] + [0] + vals[axis:] + ret = np.transpose(ret, tuple(olddims)) + return ret + + + +# --------------------------------------------------------------------------------} +# --- Spectral Averaging +# --------------------------------------------------------------------------------{ +"""Tools for spectral analysis. """ + +def welch(x, fs=1.0, window='hann', nperseg=None, noverlap=None, nfft=None, + detrend='constant', return_onesided=True, scaling='density', + axis=-1): + """Interface identical to scipy.signal """ + + if detrend==True: + detrend='constant' + + freqs, Pxx = csd(x, x, fs, window, nperseg, noverlap, nfft, detrend, return_onesided, scaling, axis) + return freqs, Pxx.real + +#>>>> +def pwelch(x, window='hamming', noverlap=None, nfft=None, fs=1.0, nperseg=None, + detrend=False, return_onesided=True, scaling='density', + axis=-1): + r""" + NOTE: interface and default options modified to match matlab's implementation + >> detrend: default to False + >> window : default to 'hamming' + >> window: if an integer, use 'hamming(window, sym=True)' + + + Estimate power spectral density using Welch's method. + + Welch's method [1]_ computes an estimate of the power spectral + density by dividing the data into overlapping segments, computing a + modified periodogram for each segment and averaging the + periodograms. + + Parameters + ---------- + x : array_like + Time series of measurement values + fs : float, optional + Sampling frequency of the `x` time series. Defaults to 1.0. + window : str or tuple or array_like, optional + Desired window to use. If `window` is a string or tuple, it is + passed to `get_window` to generate the window values, which are + DFT-even by default. See `get_window` for a list of windows and + required parameters. If `window` is array_like it will be used + directly as the window and its length must be nperseg. Defaults + to a Hann window. + nperseg : int, optional + Length of each segment. 
Defaults to None, but if window is str or + tuple, is set to 256, and if window is array_like, is set to the + length of the window. + noverlap : int, optional + Number of points to overlap between segments. If `None`, + ``noverlap = nperseg // 2``. Defaults to `None`. + nfft : int, optional + Length of the FFT used, if a zero padded FFT is desired. If + `None`, the FFT length is `nperseg`. Defaults to `None`. + detrend : str or function or `False`, optional + Specifies how to detrend each segment. If `detrend` is a + string, it is passed as the `type` argument to the `detrend` + function. If it is a function, it takes a segment and returns a + detrended segment. If `detrend` is `False`, no detrending is + done. Defaults to 'constant'. + return_onesided : bool, optional + If `True`, return a one-sided spectrum for real data. If + `False` return a two-sided spectrum. Note that for complex + data, a two-sided spectrum is always returned. + scaling : { 'density', 'spectrum' }, optional + Selects between computing the power spectral density ('density') + where `Pxx` has units of V**2/Hz and computing the power + spectrum ('spectrum') where `Pxx` has units of V**2, if `x` + is measured in V and `fs` is measured in Hz. Defaults to + 'density' + axis : int, optional + Axis along which the periodogram is computed; the default is + over the last axis (i.e. ``axis=-1``). + + Returns + ------- + f : ndarray + Array of sample frequencies. + Pxx : ndarray + Power spectral density or power spectrum of x. + + See Also + -------- + periodogram: Simple, optionally modified periodogram + lombscargle: Lomb-Scargle periodogram for unevenly sampled data + + Notes + ----- + An appropriate amount of overlap will depend on the choice of window + and on your requirements. For the default Hann window an overlap of + 50% is a reasonable trade off between accurately estimating the + signal power, while not over counting any of the data. Narrower + windows may require a larger overlap. + + If `noverlap` is 0, this method is equivalent to Bartlett's method + [2]_. + + .. versionadded:: 0.12.0 + + References + ---------- + .. [1] P. Welch, "The use of the fast Fourier transform for the + estimation of power spectra: A method based on time averaging + over short, modified periodograms", IEEE Trans. Audio + Electroacoust. vol. 15, pp. 70-73, 1967. + .. [2] M.S. Bartlett, "Periodogram Analysis and Continuous Spectra", + Biometrika, vol. 37, pp. 1-16, 1950. 
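    Examples
    --------
    Minimal usage sketch (added for illustration, using the modified defaults
    described above):

    >>> import numpy as np
    >>> t = np.arange(0, 100, 0.01)           # 100 s sampled at 100 Hz
    >>> x = np.sin(2*np.pi*5.0*t)             # 5 Hz sine
    >>> f, Pxx, Info = pwelch(x, fs=100.0)    # Hamming window, 50% overlap
    >>> f[np.argmax(Pxx)]                     # peak close to 5 Hz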
+ + """ + import math + def fnextpow2(x): + return 2**math.ceil( math.log(x)*0.99999999999/math.log(2)); + + # MANU >>> CHANGE OF DEFAULT OPTIONS + # MANU - If a length is provided use symmetric hamming window + if type(window)==int: + window=hamming(window, True) + # MANU - do not use 256 as default + if isinstance(window, string_types) or isinstance(window, tuple): + if nperseg is None: + if noverlap is None: + overlap_frac=0.5 + elif noverlap == 0: + overlap_frac=0 + else: + raise NotImplementedError('TODO noverlap set but not nperseg') + #nperseg = 256 # then change to default + nperseg=fnextpow2(math.sqrt(x.shape[-1]/(1-overlap_frac))); + + # MANU accepting true as detrend + if detrend==True: + detrend='constant' + + freqs, Pxx, Info = csd(x, x, fs, window, nperseg, noverlap, nfft, detrend, + return_onesided, scaling, axis) + + return freqs, Pxx.real, Info + + +def csd(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, nfft=None, + detrend='constant', return_onesided=True, scaling='density', axis=-1): + r""" + Estimate the cross power spectral density, Pxy, using Welch's + method. + """ + + freqs, _, Pxy, Info = _spectral_helper(x, y, fs, window, nperseg, noverlap, nfft, + detrend, return_onesided, scaling, axis, + mode='psd') + + # Average over windows. + if len(Pxy.shape) >= 2 and Pxy.size > 0: + if Pxy.shape[-1] > 1: + Pxy = Pxy.mean(axis=-1) + else: + Pxy = np.reshape(Pxy, Pxy.shape[:-1]) + + return freqs, Pxy, Info + + + +def coherence(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, + nfft=None, detrend='constant', axis=-1): + r""" + Estimate the magnitude squared coherence estimate, Cxy, of + discrete-time signals X and Y using Welch's method. + + ``Cxy = abs(Pxy)**2/(Pxx*Pyy)``, where `Pxx` and `Pyy` are power + spectral density estimates of X and Y, and `Pxy` is the cross + spectral density estimate of X and Y. + """ + + freqs, Pxx, Infoxx = welch(x, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) + _, Pyy, Infoyy = welch(y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) + _, Pxy, Infoxy = csd(x, y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) + + Cxy = np.abs(Pxy)**2 / Pxx / Pyy + + return freqs, Cxy, Infoxx + + +def _spectral_helper(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, + nfft=None, detrend='constant', return_onesided=True, + scaling='spectrum', axis=-1, mode='psd', boundary=None, + padded=False): + """ Calculate various forms of windowed FFTs for PSD, CSD, etc. """ + if mode not in ['psd', 'stft']: + raise ValueError("Unknown value for mode %s, must be one of: " + "{'psd', 'stft'}" % mode) + + + + + + boundary_funcs = {'even': even_ext, + 'odd': odd_ext, + 'constant': const_ext, + 'zeros': zero_ext, + None: None} + + if boundary not in boundary_funcs: + raise ValueError("Unknown boundary option '{0}', must be one of: {1}" + .format(boundary, list(boundary_funcs.keys()))) + + # If x and y are the same object we can save ourselves some computation. 
+ same_data = y is x + + if not same_data and mode != 'psd': + raise ValueError("x and y must be equal if mode is 'stft'") + + axis = int(axis) + + # Ensure we have np.arrays, get outdtype + x = np.asarray(x) + if not same_data: + y = np.asarray(y) + outdtype = np.result_type(x, y, np.complex64) + else: + outdtype = np.result_type(x, np.complex64) + + if not same_data: + # Check if we can broadcast the outer axes together + xouter = list(x.shape) + youter = list(y.shape) + xouter.pop(axis) + youter.pop(axis) + try: + outershape = np.broadcast(np.empty(xouter), np.empty(youter)).shape + except ValueError: + raise ValueError('x and y cannot be broadcast together.') + + if same_data: + if x.size == 0: + return np.empty(x.shape), np.empty(x.shape), np.empty(x.shape) + else: + if x.size == 0 or y.size == 0: + outshape = outershape + (min([x.shape[axis], y.shape[axis]]),) + emptyout = np.rollaxis(np.empty(outshape), -1, axis) + return emptyout, emptyout, emptyout + + if x.ndim > 1: + if axis != -1: + x = np.rollaxis(x, axis, len(x.shape)) + if not same_data and y.ndim > 1: + y = np.rollaxis(y, axis, len(y.shape)) + + # Check if x and y are the same length, zero-pad if necessary + if not same_data: + if x.shape[-1] != y.shape[-1]: + if x.shape[-1] < y.shape[-1]: + pad_shape = list(x.shape) + pad_shape[-1] = y.shape[-1] - x.shape[-1] + x = np.concatenate((x, np.zeros(pad_shape)), -1) + else: + pad_shape = list(y.shape) + pad_shape[-1] = x.shape[-1] - y.shape[-1] + y = np.concatenate((y, np.zeros(pad_shape)), -1) + + if nperseg is not None: # if specified by user + nperseg = int(nperseg) + if nperseg < 1: + raise ValueError('nperseg must be a positive integer') + + # parse window; if array like, then set nperseg = win.shape + win, nperseg = _triage_segments(window, nperseg,input_length=x.shape[-1]) + + if nfft is None: + nfft = nperseg + elif nfft < nperseg: + raise ValueError('nfft must be greater than or equal to nperseg.') + else: + nfft = int(nfft) + + if noverlap is None: + noverlap = nperseg//2 + else: + noverlap = int(noverlap) + if noverlap >= nperseg: + raise ValueError('noverlap must be less than nperseg.') + nstep = nperseg - noverlap + + # Padding occurs after boundary extension, so that the extended signal ends + # in zeros, instead of introducing an impulse at the end. + # I.e. if x = [..., 3, 2] + # extend then pad -> [..., 3, 2, 2, 3, 0, 0, 0] + # pad then extend -> [..., 3, 2, 0, 0, 0, 2, 3] + + if boundary is not None: + ext_func = boundary_funcs[boundary] + x = ext_func(x, nperseg//2, axis=-1) + if not same_data: + y = ext_func(y, nperseg//2, axis=-1) + + if padded: + # Pad to integer number of windowed segments + # I.e make x.shape[-1] = nperseg + (nseg-1)*nstep, with integer nseg + nadd = (-(x.shape[-1]-nperseg) % nstep) % nperseg + zeros_shape = list(x.shape[:-1]) + [nadd] + x = np.concatenate((x, np.zeros(zeros_shape)), axis=-1) + if not same_data: + zeros_shape = list(y.shape[:-1]) + [nadd] + y = np.concatenate((y, np.zeros(zeros_shape)), axis=-1) + + # Handle detrending and window functions + if not detrend: + def detrend_func(d): + return d + elif not hasattr(detrend, '__call__'): + def detrend_func(d): + return signaltools_detrend(d, type=detrend, axis=-1) + elif axis != -1: + # Wrap this function so that it receives a shape that it could + # reasonably expect to receive. 
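    # The wrapper rolls the last (segment) axis back to the user's `axis`
    # before calling the user-supplied detrend callable, then rolls it to the
    # end again so the rest of the pipeline keeps working on the last axis.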
+ def detrend_func(d): + d = np.rollaxis(d, -1, axis) + d = detrend(d) + return np.rollaxis(d, axis, len(d.shape)) + else: + detrend_func = detrend + + if np.result_type(win,np.complex64) != outdtype: + win = win.astype(outdtype) + + if scaling == 'density': + scale = 1.0 / (fs * (win*win).sum()) + elif scaling == 'spectrum': + scale = 1.0 / win.sum()**2 + else: + raise ValueError('Unknown scaling: %r' % scaling) + + if mode == 'stft': + scale = np.sqrt(scale) + + if return_onesided: + if np.iscomplexobj(x): + sides = 'twosided' + #warnings.warn('Input data is complex, switching to ' 'return_onesided=False') + else: + sides = 'onesided' + if not same_data: + if np.iscomplexobj(y): + sides = 'twosided' + #warnings.warn('Input data is complex, switching to return_onesided=False') + else: + sides = 'twosided' + + if sides == 'twosided': + raise Exception('NOT IMPLEMENTED') + #freqs = fftpack.fftfreq(nfft, 1/fs) + elif sides == 'onesided': + freqs = np.fft.rfftfreq(nfft, 1/fs) + + # Perform the windowed FFTs + result = _fft_helper(x, win, detrend_func, nperseg, noverlap, nfft, sides) + + if not same_data: + # All the same operations on the y data + result_y = _fft_helper(y, win, detrend_func, nperseg, noverlap, nfft, + sides) + result = np.conjugate(result) * result_y + elif mode == 'psd': + result = np.conjugate(result) * result + + result *= scale + if sides == 'onesided' and mode == 'psd': + if nfft % 2: + result[..., 1:] *= 2 + else: + # Last point is unpaired Nyquist freq point, don't double + result[..., 1:-1] *= 2 + + time = np.arange(nperseg/2, x.shape[-1] - nperseg/2 + 1, + nperseg - noverlap)/float(fs) + if boundary is not None: + time -= (nperseg/2) / fs + + result = result.astype(outdtype) + + # All imaginary parts are zero anyways + if same_data and mode != 'stft': + result = result.real + + # Output is going to have new last axis for time/window index, so a + # negative axis index shifts down one + if axis < 0: + axis -= 1 + + # Roll frequency axis back to axis where the data came from + result = np.rollaxis(result, -1, axis) + + # TODO + class InfoClass(): + pass + Info = InfoClass(); + Info.df=freqs[1]-freqs[0] + Info.fMax=freqs[-1] + Info.LFreq=len(freqs) + Info.LSeg=nperseg + Info.LWin=len(win) + Info.LOvlp=noverlap + Info.nFFT=nfft + Info.nseg=-1 + #print('df:{:.3f} - fm:{:.2f} - nseg:{} - Lf:{:5d} - Lseg:{:5d} - Lwin:{:5d} - Lovlp:{:5d} - Nfft:{:5d} - Lsig:{}'.format(freqs[1]-freqs[0],freqs[-1],-1,len(freqs),nperseg,len(win),noverlap,nfft,x.shape[-1])) + return freqs, time, result, Info + + +def _fft_helper(x, win, detrend_func, nperseg, noverlap, nfft, sides): + """ Calculate windowed FFT """ + # Created strided array of data segments + if nperseg == 1 and noverlap == 0: + result = x[..., np.newaxis] + else: + # http://stackoverflow.com/a/5568169 + step = nperseg - noverlap + shape = x.shape[:-1]+((x.shape[-1]-noverlap)//step, nperseg) + strides = x.strides[:-1]+(step*x.strides[-1], x.strides[-1]) + result = np.lib.stride_tricks.as_strided(x, shape=shape, + strides=strides) + + # Detrend each data segment individually + result = detrend_func(result) + + # Apply window by multiplication + result = win * result + + # Perform the fft. Acts on last axis by default. 
Zero-pads automatically + if sides == 'twosided': + raise Exception('NOT IMPLEMENTED') + #func = fftpack.fft + else: + result = result.real + func = np.fft.rfft + result = func(result, n=nfft) + + return result + +def _triage_segments(window, nperseg,input_length): + """ + Parses window and nperseg arguments for spectrogram and _spectral_helper. + This is a helper function, not meant to be called externally. + """ + + #parse window; if array like, then set nperseg = win.shape + if isinstance(window, string_types) or isinstance(window, tuple): + # if nperseg not specified + if nperseg is None: + nperseg = 256 # then change to default + if nperseg > input_length: + print('nperseg = {0:d} is greater than input length ' + ' = {1:d}, using nperseg = {1:d}' + .format(nperseg, input_length)) + nperseg = input_length + win = get_window(window, nperseg) + else: + win = np.asarray(window) + if len(win.shape) != 1: + raise ValueError('window must be 1-D') + if input_length < win.shape[-1]: + raise ValueError('window is longer than input signal') + if nperseg is None: + nperseg = win.shape[0] + elif nperseg is not None: + if nperseg != win.shape[0]: + raise ValueError("value specified for nperseg is different from" + " length of window") + + return win, nperseg + + + + + + +# --------------------------------------------------------------------------------} +# --- Unittests +# --------------------------------------------------------------------------------{ +import unittest + +class TestSpectral(unittest.TestCase): + + def test_fft_amplitude(self): + dt=0.1 + t=np.arange(0,10,dt); + f0=1; + A=5; + y=A*np.sin(2*np.pi*f0*t) + f,Y,_=fft_amplitude(y,fs=1/dt,detrend=False) + i=np.argmax(Y) + self.assertAlmostEqual(Y[i],A) + self.assertAlmostEqual(f[i],f0) + + def test_fft_binning(self): + dt=0.1 + t=np.arange(0,10,dt); + f0=1; + A=5; + y=A*np.sin(2*np.pi*f0*t) + + f, Y, Info = psd_binned(y, fs=1/dt, nPerDecade=10, detrend ='constant') + f2, Y2, Info2 = psd (y, fs=1/dt, detrend ='constant') + #print(f) + #print(Y) + + #import matplotlib.pyplot as plt + #fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8) + #fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20) + #ax.plot( f2, Y2 , label='Full') + #ax.plot( f, Y , label='Binned') + #ax.set_xlabel('') + #ax.set_ylabel('') + #ax.legend() + #plt.show() + +if __name__ == '__main__': + #TestSpectral().test_fft_binning() + unittest.main() + From 0a1b94879d5d0f0eaa198f325f9da88c5a3951ba Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Fri, 22 Apr 2022 10:59:22 -0600 Subject: [PATCH 30/36] Installer: adding scipy.signal --- installer.cfg | 1 - 1 file changed, 1 deletion(-) diff --git a/installer.cfg b/installer.cfg index 32d14b3..0211ce8 100644 --- a/installer.cfg +++ b/installer.cfg @@ -98,7 +98,6 @@ exclude=weio/.git* pkgs/scipy/io/harwell_boieng/tests pkgs/scipy/ndimage pkgs/scipy/odr - pkgs/scipy/signal pkgs/scipy/extra-dll/libbanded* pkgs/scipy/extra-dll/libd_odr* pkgs/scipy/extra-dll/libdcosqb* From 004107bf91f743813eaf628ef97eafda094f598b Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Fri, 22 Apr 2022 10:59:49 -0600 Subject: [PATCH 31/36] Update of weio --- weio | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/weio b/weio index 9243842..4d421ba 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit 9243842cf41c2a3437c590c58e633c7019381b23 +Subproject commit 4d421ba27d38d67e500fdf5eb3477614b4baecca From 037a37b3bb199cb5c7501e2fc4f5cbef1eef7570 Mon Sep 17 00:00:00 2001 
From: Emmanuel Branlard Date: Fri, 22 Apr 2022 11:04:50 -0600 Subject: [PATCH 32/36] Using weio.weio --- pydatview/Tables.py | 2 +- pydatview/fast/fastfarm.py | 25 +++++++++++----------- pydatview/main.py | 2 +- pydatview/tools/signal.py | 44 ++++++++++++++++++++++++++++++++++++-- weio | 2 +- 5 files changed, 57 insertions(+), 18 deletions(-) diff --git a/pydatview/Tables.py b/pydatview/Tables.py index cd433a4..7974b75 100644 --- a/pydatview/Tables.py +++ b/pydatview/Tables.py @@ -7,7 +7,7 @@ except: from common import no_unit, ellude_common, getDt try: - import weio # File Formats and File Readers + import weio.weio as weio# File Formats and File Readers except: print('') print('Error: the python package `weio` was not imported successfully.\n') diff --git a/pydatview/fast/fastfarm.py b/pydatview/fast/fastfarm.py index 950433a..1c939c0 100644 --- a/pydatview/fast/fastfarm.py +++ b/pydatview/fast/fastfarm.py @@ -2,10 +2,9 @@ import glob import numpy as np import pandas as pd -try: - import weio -except: - raise Exception('Python package `weio` not found, please install it from https://github.com/ebranlard/weio ') +from weio.weio.fast_input_file import FASTInputFile +from weio.weio.fast_output_file import FASTOutputFile +from weio.weio.turbsim_file import TurbSimFile from . import fastlib @@ -49,13 +48,13 @@ def writeFSTandDLL(FstT1Name, nWT): FstT1Full = os.path.abspath(FstT1Name).replace('\\','/') FstDir = os.path.dirname(FstT1Full) - fst=weio.read(FstT1Name) + fst=FASTInputFile(FstT1Name) SrvT1Name = fst['ServoFile'].strip('"') SrvT1Full = os.path.join(FstDir, SrvT1Name).replace('\\','/') SrvDir = os.path.dirname(SrvT1Full) SrvT1RelFst = os.path.relpath(SrvT1Full,FstDir) if os.path.exists(SrvT1Full): - srv=weio.read(SrvT1Full) + srv=FASTInputFile(SrvT1Full) DLLT1Name = srv['DLL_FileName'].strip('"') DLLT1Full = os.path.join(SrvDir, DLLT1Name) if os.path.exists(DLLT1Full): @@ -139,12 +138,12 @@ def rectangularLayoutSubDomains(D,Lx,Ly): return XWT, YWT, ZWT -def fastFarmTurbSimExtent(TurbSimFile, HubHeight, D, xWT, yWT, Cmeander=1.9, Chord_max=3, extent_X=1.2, extent_Y=1.2): +def fastFarmTurbSimExtent(TurbSimFilename, HubHeight, D, xWT, yWT, Cmeander=1.9, Chord_max=3, extent_X=1.2, extent_Y=1.2): """ Determines "Ambient Wind" box parametesr for FastFarm, based on a TurbSimFile ('bts') """ # --- TurbSim data - ts = weio.read(TurbSimFile) + ts = TurbSimFile(TurbSimFilename) #iy,iz = ts.closestPoint(y=0,z=HubHeight) #iy,iz = ts.closestPoint(y=0,z=HubHeight) zMid, uMid = ts.midValues() @@ -252,7 +251,7 @@ def writeFastFarm(outputFile, templateFile, xWT, yWT, zWT, FFTS=None, OutListT1= FFTS: FastFarm TurbSim parameters as returned by fastFarmTurbSimExtent """ # --- Read template fast farm file - fst=weio.FASTInputFile(templateFile) + fst=FASTInputFile(templateFile) # --- Replace box extent values if FFTS is not None: fst['Mod_AmbWind'] = 2 @@ -292,7 +291,7 @@ def writeFastFarm(outputFile, templateFile, xWT, yWT, zWT, FFTS=None, OutListT1= def setFastFarmOutputs(fastFarmFile, OutListT1): """ Duplicate the output list, by replacing "T1" with T1->Tn """ - fst = weio.read(fastFarmFile) + fst = FASTInputFile(fastFarmFile) nWTOut = min(fst['NumTurbines'],9) # Limited to 9 turbines OutList=[''] for s in OutListT1: @@ -308,7 +307,7 @@ def setFastFarmOutputs(fastFarmFile, OutListT1): def plotFastFarmSetup(fastFarmFile): """ """ import matplotlib.pyplot as plt - fst=weio.FASTInputFile(fastFarmFile) + fst=FASTInputFile(fastFarmFile) fig = plt.figure(figsize=(13.5,10)) ax = 
fig.add_subplot(111,aspect="equal") @@ -432,7 +431,7 @@ def spanwisePostProFF(fastfarm_input,avgMethod='constantwindow',avgParam=30,D=1, """ # --- Opening ouputfile if df is None: - df=weio.read(fastfarm_out).toDataFrame() + df=FASTOutputFile(fastfarm_out).toDataFrame() # --- Opening input file and extracting inportant variables if fastfarm_input is None: @@ -447,7 +446,7 @@ def spanwisePostProFF(fastfarm_input,avgMethod='constantwindow',avgParam=30,D=1, vD=None D=0 else: - main=weio.FASTInputFile(fastfarm_input) + main=FASTInputFile(fastfarm_input) iOut = main['OutRadii'] dr = main['dr'] # Radial increment of radial finite-difference grid (m) OutDist = main['OutDist'] # List of downstream distances for wake output for an individual rotor diff --git a/pydatview/main.py b/pydatview/main.py index b4409c0..b82559c 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -39,7 +39,7 @@ from .plugins import dataPlugins try: - import weio # File Formats and File Readers + import weio.weio as weio# File Formats and File Readers except: print('') print('Error: the python package `weio` was not imported successfully.\n') diff --git a/pydatview/tools/signal.py b/pydatview/tools/signal.py index e32da6e..b8371a7 100644 --- a/pydatview/tools/signal.py +++ b/pydatview/tools/signal.py @@ -58,15 +58,19 @@ def multiInterp(x, xp, fp, extrap='bounded'): INPUTS: - x : array ( n ), new values - xp : array ( np ), old values - - fp : array ( np x ncol), matrix values to be interpolated + - fp : array ( nval, np), matrix values to be interpolated """ + # Sanity + x = np.asarray(x) + xp = np.asarray(xp) + assert fp.shape[1]==len(xp), 'Second dimension of fp should have the same length as xp' + j = np.searchsorted(xp, x) - 1 dd = np.zeros(len(x)) bOK = np.logical_and(j>=0, j< len(xp)-1) bLower =j<0 bUpper =j>=len(xp)-1 jOK = j[bOK] - #import pdb; pdb.set_trace() dd[bOK] = (x[bOK] - xp[jOK]) / (xp[jOK + 1] - xp[jOK]) jBef=j jAft=j+1 @@ -86,6 +90,42 @@ def multiInterp(x, xp, fp, extrap='bounded'): return (1 - dd) * fp[:,jBef] + fp[:,jAft] * dd +def interpArray(x, xp, fp, extrap='bounded'): + """ + Interpolate all the columns of a matrix `fp` based on one new value `x` + INPUTS: + - x : scalar new values + - xp : array ( np ), old values + - fp : array ( nval, np), matrix values to be interpolated + """ + # Sanity + xp = np.asarray(xp) + assert fp.shape[1]==len(xp), 'Second dimension of fp should have the same length as xp' + + j = np.searchsorted(xp, x) - 1 + if j<0: + # Before bounds + if extrap=='bounded': + return fp[:,0] + elif extrap=='nan': + return fp[:,0]*np.nan + else: + raise NotImplementedError() + + elif j>=len(xp)-1: + # After bounds + if extrap=='bounded': + return fp[:,-1] + elif extrap=='nan': + return fp[:,-1]*np.nan + else: + raise NotImplementedError() + else: + # Normal case, within bounds + dd = (x- xp[j]) / (xp[j+1] - xp[j]) + return (1 - dd) * fp[:,j] + fp[:,j+1] * dd + + def resample_interp(x_old, x_new, y_old=None, df_old=None): #x_new=np.sort(x_new) if df_old is not None: diff --git a/weio b/weio index 4d421ba..6ededa8 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit 4d421ba27d38d67e500fdf5eb3477614b4baecca +Subproject commit 6ededa8b9641aa51873d8464eebdc0349366bcb8 From 37939c61af853c7f7766ee0f9b3570b6f37604a2 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Fri, 22 Apr 2022 11:17:14 -0600 Subject: [PATCH 33/36] Installer: adding fftpack --- installer.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/installer.cfg b/installer.cfg index 0211ce8..0d09462 
100644 --- a/installer.cfg +++ b/installer.cfg @@ -91,7 +91,6 @@ exclude=weio/.git* pkgs/wx/lib/wxcairo pkgs/scipy/cluster pkgs/scipy/constants - pkgs/scipy/fftpack pkgs/scipy/io/tests pkgs/scipy/io/arff pkgs/scipy/io/matlab/tests @@ -104,6 +103,7 @@ exclude=weio/.git* pkgs/scipy/extra-dll/libdfft_sub* pkgs/scipy/*/tests +# pkgs/scipy/fftpack # pkgs\matplotlib\mpl-data ##Click==7.0 From b017b8764f0a1aece5361fcc207ea1637c797878 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Fri, 22 Apr 2022 11:31:18 -0600 Subject: [PATCH 34/36] Installer: discarding scignal and fftpack again --- installer.cfg | 3 ++- pydatview/tools/spectral.py | 11 ++++++++--- weio | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/installer.cfg b/installer.cfg index 0d09462..ac76f6d 100644 --- a/installer.cfg +++ b/installer.cfg @@ -102,8 +102,9 @@ exclude=weio/.git* pkgs/scipy/extra-dll/libdcosqb* pkgs/scipy/extra-dll/libdfft_sub* pkgs/scipy/*/tests + pkgs/scipy/fftpack + pkgs/scipy/signal -# pkgs/scipy/fftpack # pkgs\matplotlib\mpl-data ##Click==7.0 diff --git a/pydatview/tools/spectral.py b/pydatview/tools/spectral.py index baaf43b..75f1659 100644 --- a/pydatview/tools/spectral.py +++ b/pydatview/tools/spectral.py @@ -672,7 +672,9 @@ def fnextpow2(x): def csd(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, nfft=None, - detrend='constant', return_onesided=True, scaling='density', axis=-1): + detrend='constant', return_onesided=True, scaling='density', axis=-1, + returnInfo=False + ): r""" Estimate the cross power spectral density, Pxy, using Welch's method. @@ -689,7 +691,10 @@ def csd(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, nfft=None, else: Pxy = np.reshape(Pxy, Pxy.shape[:-1]) - return freqs, Pxy, Info + if returnInfo: + return freqs, Pxy, Info + else: + return freqs, Pxy @@ -706,7 +711,7 @@ def coherence(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, freqs, Pxx, Infoxx = welch(x, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) _, Pyy, Infoyy = welch(y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) - _, Pxy, Infoxy = csd(x, y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis) + _, Pxy, Infoxy = csd(x, y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis, returnInfo=True) Cxy = np.abs(Pxy)**2 / Pxx / Pyy diff --git a/weio b/weio index 6ededa8..8f5b2b8 160000 --- a/weio +++ b/weio @@ -1 +1 @@ -Subproject commit 6ededa8b9641aa51873d8464eebdc0349366bcb8 +Subproject commit 8f5b2b8a387789910f6fc59b0f9466c3a160b4b6 From 9dfde8fc7bbe3381c7b1797391960477f7cee926 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Mon, 25 Apr 2022 11:24:07 -0600 Subject: [PATCH 35/36] Spectral: fix for new interface of csd --- pydatview/tools/spectral.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydatview/tools/spectral.py b/pydatview/tools/spectral.py index 75f1659..fa71395 100644 --- a/pydatview/tools/spectral.py +++ b/pydatview/tools/spectral.py @@ -666,7 +666,7 @@ def fnextpow2(x): detrend='constant' freqs, Pxx, Info = csd(x, x, fs, window, nperseg, noverlap, nfft, detrend, - return_onesided, scaling, axis) + return_onesided, scaling, axis, returnInfo=True) return freqs, Pxx.real, Info From fe7e4fb3bde811328b23e0cf02f7b993f633a5d3 Mon Sep 17 00:00:00 2001 From: Emmanuel Branlard Date: Wed, 22 Jun 2022 11:11:17 -0600 Subject: [PATCH 36/36] Version: preparation for v0.3 release --- _tools/NewRelease.md | 9 +++++++++ installer.cfg | 2 +- pydatview/main.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) create 
mode 100644 _tools/NewRelease.md diff --git a/_tools/NewRelease.md b/_tools/NewRelease.md new file mode 100644 index 0000000..4f14fc9 --- /dev/null +++ b/_tools/NewRelease.md @@ -0,0 +1,9 @@ + + +Steps: +- Change PROG\_VERSION in pydatview/main.py +- Change version in installer.cfg +- Commit changes and push to pull request +- Merge pull request from old to new version +- Tag new version v0.x `git tag -a v0.x` +- Push tags: `git push --tags` diff --git a/installer.cfg b/installer.cfg index ac76f6d..f41020c 100644 --- a/installer.cfg +++ b/installer.cfg @@ -1,6 +1,6 @@ [Application] name=pyDatView -version=0.2 +version=0.3 entry_point=pydatview:show icon=ressources/pyDatView.ico diff --git a/pydatview/main.py b/pydatview/main.py index b82559c..483b880 100644 --- a/pydatview/main.py +++ b/pydatview/main.py @@ -56,7 +56,7 @@ # --- GLOBAL # --------------------------------------------------------------------------------{ PROG_NAME='pyDatView' -PROG_VERSION='v0.2-local' +PROG_VERSION='v0.3-local' SIDE_COL = [160,160,300,420,530] SIDE_COL_LARGE = [200,200,360,480,600] BOT_PANL =85
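
A short usage sketch for the interpolation helpers added to pydatview/tools/signal.py in PATCH 32/36 (illustrative only; the grid and values are made up, and the import path `pydatview.tools.signal` is assumed from the file layout shown above):

    import numpy as np
    from pydatview.tools.signal import multiInterp, interpArray

    xp = np.linspace(0, 10, 11)                  # original grid (np points)
    fp = np.vstack((np.sin(xp), np.cos(xp)))     # (nval, np) = (2, 11) matrix of values

    x_new = np.linspace(0, 10, 101)
    F     = multiInterp(x_new, xp, fp)           # (2, 101): all rows interpolated at once
    f_one = interpArray(2.5, xp, fp)             # length-2 vector at the single value x=2.5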