Update tracers_to_PRISM.py #33

Open: wants to merge 6 commits into base: master
104 changes: 104 additions & 0 deletions script/analysis/accumulate_PRISM.py
@@ -0,0 +1,104 @@
#!/usr/bin/env python

#"""
# Author: Kelsey Lund but based on Jonah Miller's analysis scripts
#
# Accumulate tracer data into a single file with values that are used as inputs for PRISM calculations.
# For now, this should only be used as a post-processing step in the sense that it assumes PRISM
# trajectory files already exist.
# To do: Build this structure into the cleanup_trace function in tracers_to_PRISM so that one of the
# outputs from that whole procedure is the accumulated file.
# """

import glob
import pickle
import traceback
import numpy as np
import hdf5_to_dict as io
from multiprocessing import Pool
from argparse import ArgumentParser
import units; units = units.get_cgs()

def find_t_closest(times, time):
    # Index of the entry in 'times' closest to 'time'.
    times = np.asarray(times)
    return int(np.argmin(np.abs(times - time)))
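# e.g. find_t_closest([0.0, 0.5, 1.0], 0.6) -> 1, the index of the closest time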

def find_PRISM_start_t(trace_ID, dirt):
    # Pulls out the starting time of a PRISM trajectory file (in seconds, beware!)
    trid = int(trace_ID.split('trace_')[-1].split('.td')[0])
    tstart = np.loadtxt(dirt + '/trace_%08d' % trid + '/trajectory.dat')[0][0]
    return tstart
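# e.g. find_PRISM_start_t('analysis/traces/00003/trace_00000042.td', 'analysis/done_DND')
# (paths hypothetical) reads analysis/done_DND/trace_00000042/trajectory.dat and
# returns the first entry of its first row.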

def get_IDs(dirt):
    # dirt should be something like 'analysis/traces/' WITH the trailing slash (for now).
    # It should be the traces directory that holds the (usually) 100 subdirectories.
    IDs = []
    subdirs = sorted(glob.glob(dirt + '*/'))
    for subdir in subdirs:
        tr_ids = glob.glob(subdir + 'trace_*.td')
        for ID in tr_ids:
            IDs.append(ID)
    return IDs
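# Expected layout (paths hypothetical):
#   analysis/traces/00000/trace_00000001.td
#   analysis/traces/00000/trace_00000042.td
#   analysis/traces/00001/trace_00000137.td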

def loop(inputs):
    # inputs = (tr_id, pr_dirt); pr_dirt is where the PRISM trajectory files
    # live - something like '/analysis/done_DND/'.
    try:
        tr_id, pr_dirt = inputs
        trace = io.TracerData.fromfile(tr_id)
        trace_times = trace['time'] * trace.units['T_unit']
        vals = {}

        start_t = find_PRISM_start_t(tr_id, pr_dirt)
        index = find_t_closest(trace_times, start_t)

        for key in trace.keys():
            vals[key] = [trace[key][index]]
        return vals
    except:
        traceback.print_exc()  # returns None for this tracer on failure
#---------------------------------------------------------------------------------------#

parser = ArgumentParser()
parser.add_argument('trace_dir', type=str,
                    help='Directory containing the per-ID subdirectories of tracer files.')
parser.add_argument('prism_dir', type=str,
                    help='Directory where the PRISM trajectory files are kept.')
parser.add_argument('accumulated', type=str,
                    help='Name of the file to save the accumulated tracers to.')
parser.add_argument('-n', '--nprocs', type=int, default=None,
                    help='Number of parallel processes to use (default: all cores).')

if __name__ == "__main__":
try:
args = parser.parse_args()

fnams = get_IDs(args.trace_dir)
p = Pool(args.nprocs)

inputs = []
vals = {}
for f in fnams:
inputs.append((f,args.prism_dir))
vals_out = p.map(loop,inputs)
for key in vals_out.keys():
if key not in vals.keys():
vals[key] = vals[key]
else:
vals[key] = np.concatenate((vals[key],vals_out[key]))

with open(args.accumulated,'wb') as f:
pickle.dump(vals, f)
except:
traceback.print_exc()
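# Reading the accumulated file back is a plain pickle load, e.g. (file name
# hypothetical):
#   with open('accumulated.pkl', 'rb') as f:
#       vals = pickle.load(f)
#   vals['Ye']  # one entry per tracer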

# Example interactive run (paths from the author's setup):
#   trace_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/traces/'
#   prism_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/done_DND/'
#   nprocs = 10
101 changes: 50 additions & 51 deletions script/analysis/tracers_to_PRISM.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# Little script to convert tracer data to a form suitable for PRISM
# Author: Jonah Miller ([email protected])
# Based on the work of Matt Mumpower
# Based on the work of Matt Mumpower, mods by Kels

from __future__ import print_function, division
import sys; sys.dont_write_bytecode = True
@@ -174,23 +174,19 @@ def frdm_comp(Ye, T9, rho, filename, frdm_path):
def signal_smooth(x,window_len=11,window='hanning'):
"""From scipy cookbook:
https://scipy-cookbook.readthedocs.io/items/SignalSmooth.html

input:
x: the input signal
window_len: the dimension of the smoothing window;
should be an odd integer
window: the type of window from
'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
flat window will produce a moving average smoothing.

x: the input signal
window_len: the dimension of the smoothing window;
should be an odd integer
window: the type of window from
'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
flat window will produce a moving average smoothing.
output:
the smoothed signal

the smoothed signal
example:

t=linspace(-2,2,0.1)
x=sin(t)+randn(len(t))*0.1
y=smooth(x)
t=linspace(-2,2,0.1)
x=sin(t)+randn(len(t))*0.1
y=smooth(x)
"""
if x.ndim != 1:
raise ValueError("smooth only accepts 1 dimension arrays.")
@@ -259,58 +255,70 @@ def value_crossing(a, v):
second = residual[1:]
return np.logical_and((first*second)<=0,first > 0)
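# Example (illustrative): value_crossing(np.array([12., 11., 9., 10., 8.]), 10.)
# gives array([False, True, False, False]): True where the signal drops from
# above v to at-or-below v between consecutive samples.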


def cleanup_trace(trace,
T9 = 10, atol = 1,
p = 6, thresh = 1e-1):
"""Takes trace and cleans it up so it's suitable for PRISM"""
# output trace
trsm,trace_out = {},{}
Tunit = cgs['MEV']/cgs['GK']

trsm['T'],trsm['rho'] = log_smooth(trace['T']),log_smooth(trace['rho'])

# starting temperature
#Tgk = trace['T']*cgs['MEV']/cgs['GK']
Tgk = trsm['T']*cgs['MEV']/cgs['GK']
Tgk = trsm['T']*Tunit
TgkT9 = value_crossing(Tgk, T9)
sidx = np.where(TgkT9)[0][-1] + 1
# Need to account for possibility that there is no T9 crossing point
# in which case TgkT9 will just be an array full of False values
if len(np.where(TgkT9)[0]) == 0:
sidx = 0
print('No T > T9 = ',T9,' exists. Starting at T = ',Tgk[sidx])
else:
sidx = np.where(TgkT9)[0][-1] + 1
print('Initial is < T9 of',T9,'; Starting at crossing point T = ',Tgk[sidx])

# Sometimes the temperature is not actually that close to T9=10,
# so this will do a little interpolation to find a suitable
# starting point.

if T9 > Tgk[sidx:][0] > T9-atol or Tgk[0] < T9:
if Tgk[0] < T9:
print("WARNING: Tracer {} starting at temperature {} < T9 = {}".format(trace['id'][0], Tgk[0], T9))
sidx = 0
if T9 > Tgk[sidx:][0] > T9-atol or Tgk[0] < T9:
print(Tgk[sidx:][0])
for k in trace.keys() - ['T','rho']:
trace_out[k] = trace[k][sidx:]
trace_out['T'],trace_out['rho'] = trsm['T'][sidx:],trsm['rho'][sidx:]
else:
interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],Tgk[sidx-1:sidx+1])
residual = lambda t: interpsidx(t) - T9
root_results = optimize.root_scalar(residual, bracket=(trace['time'][sidx-1],trace['time'][sidx+1]))
if not root_results.converged:
raise ValueError("Root finding failed for tracer {}".format(trace['id'][0]))
new_time = root_results.root
new_val = interpsidx(new_time)
trace_out['T'] = np.insert(trsm['T'][sidx:],0,new_val/Tunit)
trace_out['time'] = np.insert(trace['time'][sidx:],0,new_time)
newx = np.arange(trace['time'][sidx-1],trace['time'][sidx],0.1)
tgk_interp = interpsidx(newx)
ind = np.where(np.isclose(tgk_interp,T9,atol=0.1))[0][-1]
trace_out['T'] = np.insert(trsm['T'][sidx:],0,tgk_interp[ind]/Tunit)
trace_out['time'] = np.insert(trace['time'][sidx:],0,newx[ind])
# Still need to get corresponding other key values
for k in trace.keys() - ['time','T']:
trace_in = trsm if k in trsm.keys() else trace
interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trace_in[k][sidx-1:sidx+1],axis=0)
trace_out[k] = np.insert(trace_in[k][sidx:],0,interpsidx(new_time),axis=0)
if trace[k].size == len(trace):
if k == 'rho':
interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trsm[k][sidx-1:sidx+1])
interp = interpsidx(newx)
trace_out[k] = np.insert(trsm[k][sidx:],0,interp[ind])
else:
interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trace[k][sidx-1:sidx+1])
interp = interpsidx(newx)
trace_out[k] = np.insert(trace[k][sidx:],0,interp[ind])
else:
dim = int(trace[k].size/len(trace))
temp,trace_out[k] = {},[]
for i in range(dim):
interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trace[k][:,i][sidx-1:sidx+1])
interp = interpsidx(newx)
temp[i] = np.insert(trace[k][sidx:][:,i],0,interp[ind])
trace_out[k].append(temp[i])
trace_out[k] = np.array(trace_out[k])
if trace_out[k].shape[0] == dim:
trace_out[k] = trace_out[k].T # transpose to (ntimes, dim); np.reshape would scramble the value pairing

# Cut off at the end
cutoff = get_cutoff(trace_out,p,thresh)
for k in trace.keys():
trace_out[k] = trace_out[k][:cutoff]

# smooth rho and T
trace_out['Tgk'] = trace_out['T']*cgs['MEV']/cgs['GK']
trace_out['Tgk'] = trace_out['T'] * Tunit

# heavies don't matter
nu_ab = (np.abs(trace_out['rate_absorbed'][:,0])
@@ -326,9 +334,7 @@ def cleanup_trace(trace,
ye_cutoff = 0
ye_avg = np.mean(trace_out['Ye'][ye_cutoff:])
ye_std = np.std(trace_out['Ye'][ye_cutoff:])
# trace_out['Ye_avg'] = trace_out['Ye'].copy()
trace_out['Ye_avg'] = ye_avg*np.ones_like(trace_out['Ye'])
# trace_out['Ye_avg'] = signal_smooth(trace_out['Ye_avg'],11)

# R
trace_out['R'] = np.sqrt(trace_out['Xcart'][:,0]**2
@@ -363,15 +369,11 @@ def meta_file(idx,mass,mass_unit,T_initx,rho_initx,total_mass = None):
mass *= mass_unit
with open('metadata.dat','w') as f:
if total_mass is not None:
#f.write("# 1:id 2:mass[g] 3:mass fraction\n")
#f.write("{} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction))
f.write("# 1:id 2:mass[g] 3:mass fraction 4:T_initx[GK] 5:rho_initx[g/cm3]\n")
f.write("{} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction,T_initx,rho_initx))
else:
f.write("# 1:id 2:mass[g] 3:T_smooth[GK] 4:rho_initx[g/cm3]\n")
f.write("{} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,T_initx,rho_initx))
#f.write("# 1:id 2:mass[g]\n")
#f.write("{} {:.6e}\n".format(idx,mass))

def extrapolate_trace(trace,T9,atol,p,thresh,tfinal = 1e3):
# extrapolation
@@ -415,14 +417,11 @@ def convert_file(tracer_file,outdir,
os.makedirs(directory,exist_ok=True)
os.chdir(directory)
to_file(trace)
#meta_file(trace['id'],trace['mass'],mass_unit,trace['T_smooth'])
meta_file(trace['id'],trace['mass'],mass_unit,trace['T_smooth'],trace['rho_smooth'])
if frdm_path is not None:
Ye = trace['Ye_avg'][0]
#rho = trace['rho'][0]
rhosmooth = trace['rho_smooth']
Tsmooth = trace['T_smooth']
#frdm_comp(Ye,T9,rho,'initx.dat',frdm_path)
rhosmooth = trace['rho_smooth'] # This is a single value, not an array
Tsmooth = trace['T_smooth'] # This is a single value, not an array
frdm_comp(Ye,Tsmooth,rhosmooth,'initx.dat',frdm_path)
print('initx file written.')
os.chdir(currdir)