From 458694345d3f16c8bbf0942e82110b2f54ee569b Mon Sep 17 00:00:00 2001
From: Jonah Miller
Date: Thu, 2 Feb 2023 09:57:16 -0700
Subject: [PATCH 1/6] Update tracers_to_PRISM.py

---
 script/analysis/tracers_to_PRISM.py | 99 +++++++++++++++++------------
 1 file changed, 57 insertions(+), 42 deletions(-)

diff --git a/script/analysis/tracers_to_PRISM.py b/script/analysis/tracers_to_PRISM.py
index 562f1a6..d2245f5 100755
--- a/script/analysis/tracers_to_PRISM.py
+++ b/script/analysis/tracers_to_PRISM.py
@@ -274,36 +274,42 @@ def cleanup_trace(trace,
     #Tgk = trace['T']*cgs['MEV']/cgs['GK']
     Tgk = trsm['T']*cgs['MEV']/cgs['GK']
     TgkT9 = value_crossing(Tgk, T9)
-    sidx = np.where(TgkT9)[0][-1] + 1
-
-    # Sometimes the temperature is not actually that close to T9=10,
-    # so this will do a little interpolation to find a suitable
-    # starting point.
-
-    if T9 > Tgk[sidx:][0] > T9-atol or Tgk[0] < T9:
-        if Tgk[0] < T9:
-            print("WARNING: Tracer {} starting at temperature {} < T9 = {}".format(trace['id'][0], Tgk[0], T9))
-        sidx = 0
-        for k in trace.keys() - ['T','rho']:
-            trace_out[k] = trace[k][sidx:]
-        trace_out['T'],trace_out['rho'] = trsm['T'][sidx:],trsm['rho'][sidx:]
-    else:
-        interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],Tgk[sidx-1:sidx+1])
-        residual = lambda t: interpsidx(t) - T9
-        root_results = optimize.root_scalar(residual, bracket=(trace['time'][sidx-1],trace['time'][sidx+1]))
-        if not root_results.converged:
-            raise ValueError("Root finding failed for tracer {}".format(trace['id'][0]))
-        new_time = root_results.root
-        new_val = interpsidx(new_time)
-        trace_out['T'] = np.insert(trsm['T'][sidx:],0,new_val/Tunit)
-        trace_out['time'] = np.insert(trace['time'][sidx:],0,new_time)
-        trace_out['time'] = np.insert(trace['time'][sidx:],0,newx[ind])
-        # Still need to get corresponding other key values
-        for k in trace.keys() - ['time','T']:
-            trace_in = trsm if k in trsm.keys() else trace
-            interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trace_in[k][sidx-1:sidx+1],axis=0)
-            trace_out[k] = np.insert(trace_in[k][sidx:],0,interpsidx(new_time),axis=0)
+    try:
+        sidx = np.where(TgkT9)[0][-1] + 1
+        # Sometimes the temperature is not actually that close to T9=10,
+        # so this will do a little interpolation to find a suitable
+        # starting point.
+        if T9 > Tgk[sidx:][0] > T9-atol or Tgk[0] < T9:
+            if Tgk[0] < T9:
+                print("WARNING: Tracer {} starting at temperature {} < T9 = {}".format(trace['id'][0], Tgk[0], T9))
+            sidx = 0
+            for k in trace.keys() - ['T','rho']:
+                trace_out[k] = trace[k][sidx:]
+            trace_out['T'],trace_out['rho'] = trsm['T'][sidx:],trsm['rho'][sidx:]
+        else:
+            interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],Tgk[sidx-1:sidx+1])
+            residual = lambda t: interpsidx(t) - T9
+            root_results = optimize.root_scalar(residual, bracket=(trace['time'][sidx-1],trace['time'][sidx+1]))
+            if not root_results.converged:
+                raise ValueError("Root finding failed for tracer {}".format(trace['id'][0]))
+            new_time = root_results.root
+            new_val = interpsidx(new_time)
+            trace_out['T'] = np.insert(trsm['T'][sidx:],0,new_val/Tunit)
+            trace_out['time'] = np.insert(trace['time'][sidx:],0,new_time)
+            trace_out['time'] = np.insert(trace['time'][sidx:],0,newx[ind])
+            # Still need to get corresponding other key values
+            for k in trace.keys() - ['time','T']:
+                trace_in = trsm if k in trsm.keys() else trace
+                interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trace_in[k][sidx-1:sidx+1],axis=0)
+                trace_out[k] = np.insert(trace_in[k][sidx:],0,interpsidx(new_time),axis=0)
+    except (ValueError, IndexError) as error:
+        print("WARNING: Tracer {} starting at temperature {} < T9 = {}".format(trace['id'][0], Tgk[0], T9))
+        sidx = 0
+        for k in trace.keys() - ['T','rho']:
+            trace_out[k] = trace[k][sidx:]
+        trace_out['T'],trace_out['rho'] = trsm['T'][sidx:],trsm['rho'][sidx:]
+
     # Cut off at the end
     cutoff = get_cutoff(trace_out,p,thresh)
     for k in trace.keys():
@@ -357,21 +363,27 @@ def to_file(trace,
                header=header,
                comments = '# ')
 
-def meta_file(idx,mass,mass_unit,T_initx,rho_initx,total_mass = None):
+def meta_file(idx, mass, mass_unit, T_initx, Tmax,
+              rho_initx = None,total_mass = None):
     if total_mass is not None:
         mass_fraction = mass / total_mass
     mass *= mass_unit
     with open('metadata.dat','w') as f:
+        if rho_initx is not None:
+            if total_mass is not None:
+                f.write("# 1:id 2:mass[g] 3:mass fraction 4:T_initx[GK] 5:rho_initx[g/cm3] 6:T_max[GK]\n")
+                f.write("{} {:.6e} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction,T_initx,rho_initx, Tmax))
+            else:
+                f.write("# 1:id 2:mass[g] 3:T_smooth[GK] 4:rho_initx[g/cm3] 5: T_max[GK]\n")
+                f.write("{} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,T_initx,rho_initx, Tmax))
+        else:
         if total_mass is not None:
-            #f.write("# 1:id 2:mass[g] 3:mass fraction\n")
-            #f.write("{} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction))
-            f.write("# 1:id 2:mass[g] 3:mass fraction 4:T_initx[GK] 5:rho_initx[g/cm3]\n")
-            f.write("{} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction,T_initx,rho_initx))
+            f.write("# 1:id 2:mass[g] 3:mass fraction 4:T_initx[GK] 5: T_max[GK]\n")
+            f.write("{} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction,T_initx, Tmax))
         else:
-            f.write("# 1:id 2:mass[g] 3:T_smooth[GK] 4:rho_initx[g/cm3]\n")
-            f.write("{} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,T_initx,rho_initx))
-            #f.write("# 1:id 2:mass[g]\n")
-            #f.write("{} {:.6e}\n".format(idx,mass))
+            f.write("# 1:id 2:mass[g] 3:T_smooth[GK] 4: T_max[GK]\n")
+            f.write("{} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,T_initx, Tmax))
+
 
 def extrapolate_trace(trace,T9,atol,p,thresh,tfinal = 1e3):
     # extrapolation
@@ -415,8 +427,8 @@ def convert_file(tracer_file,outdir,
     os.makedirs(directory,exist_ok=True)
     os.chdir(directory)
     to_file(trace)
-    #meta_file(trace['id'],trace['mass'],mass_unit,trace['T_smooth'])
-    meta_file(trace['id'],trace['mass'],mass_unit,trace['T_smooth'],trace['rho_smooth'])
+    meta_file(trace['id'],trace['mass'],mass_unit,
+              trace['T_smooth'],trace['Tgk'].max(), trace['rho_smooth'])
     if frdm_path is not None:
         Ye = trace['Ye_avg'][0]
         #rho = trace['rho'][0]
@@ -461,6 +473,7 @@ def convert_ids(tracers,directory,
     os.chdir(directory)
     for i,idx in enumerate(ids):
         print("\t...",i,idx)
+        trace = tracers.get_trace(idx)
         trace = extrapolate_trace(trace,T9,atol,p,thresh, tfinal)
         istr = "{:08d}".format(i)
 
@@ -468,7 +481,8 @@ def convert_ids(tracers,directory,
         os.mkdir(istr)
         os.chdir(istr)
         to_file(trace)
-        meta_file(idx,trace['mass'],mass_unit,total_mass)
+        meta_file(idx,trace['mass'],mass_unit, trace['T_smooth'], trace['Tgk'].max(),
+                  trace['rho_smooth'], total_mass)
         if frdm_path is not None:
             Ye = trace['Ye_avg'][0]
             rho = trace['rho'][0]
@@ -479,7 +493,8 @@
 def convert_path_by_time(inpath,outdir,
                          T9 = 10, atol = 0.3,
                          p = 6, thresh = 1e-1,
-                         frdm_path = None):
+                         frdm_path = None,
+                         tfinal = 1e3):
     tracers = io.TracerData.frompath(inpath)
     convert_ids(tracers,outdir,
                 T9=T9,atol=atol,
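The start-point search that patch 1 wraps in try/except combines three pieces: value_crossing's sign test for downward T9 crossings, a two-point interp1d between the bracketing samples, and a bracketed root_scalar to refine the crossing time. Two failure modes motivate the except clause: np.where(TgkT9)[0][-1] raises IndexError when no crossing exists, and root_scalar can raise ValueError because the bracket's right endpoint, trace['time'][sidx+1], lies one sample beyond the two-point interpolant's domain. A minimal, self-contained sketch of the same idea on a toy cooling curve (all names below are illustrative, not from the repo):

import numpy as np
from scipy import interpolate, optimize

t   = np.linspace(0.0, 2.0, 201)   # toy time grid
Tgk = 15.0 * np.exp(-0.5 * t)      # toy cooling curve in GK
T9  = 10.0

# downward crossings of T9, mirroring value_crossing
residual = Tgk - T9
crossing = np.logical_and(residual[:-1] * residual[1:] <= 0, residual[:-1] > 0)

hits = np.where(crossing)[0]
if hits.size == 0:
    t_start = t[0]                 # no crossing: keep the whole trace
else:
    sidx = hits[-1] + 1
    # refine within the bracketing pair; (t[sidx-1], t[sidx]) stays inside
    # the interpolant's domain, unlike bracketing out to t[sidx+1]
    f = interpolate.interp1d(t[sidx-1:sidx+1], Tgk[sidx-1:sidx+1])
    sol = optimize.root_scalar(lambda x: float(f(x)) - T9,
                               bracket=(t[sidx-1], t[sidx]))
    t_start = sol.root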
From 96c387ceb3581d25f4903d9d7b54d73c251bacd7 Mon Sep 17 00:00:00 2001
From: kelslund <84802420+kelslund@users.noreply.github.com>
Date: Tue, 14 Feb 2023 14:34:02 -0500
Subject: [PATCH 2/6] Update tracers_to_PRISM.py

---
 script/analysis/tracers_to_PRISM.py | 156 +++++++++++++---------------
 1 file changed, 70 insertions(+), 86 deletions(-)

diff --git a/script/analysis/tracers_to_PRISM.py b/script/analysis/tracers_to_PRISM.py
index d2245f5..e67ae86 100755
--- a/script/analysis/tracers_to_PRISM.py
+++ b/script/analysis/tracers_to_PRISM.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # Little script to convert tracer data to a form suitable for PRISM
 # Author: Jonah Miller (jonah.maxwell.miller@gmail.com)
-# Based on the work of Matt Mumpower
+# Based on the work of Matt Mumpower, mods by Kels
 
 from __future__ import print_function, division
 import sys; sys.dont_write_bytecode = True
@@ -174,23 +174,19 @@ def frdm_comp(Ye, T9, rho, filename, frdm_path):
 
 def signal_smooth(x,window_len=11,window='hanning'):
     """From scipy cookbook:
     https://scipy-cookbook.readthedocs.io/items/SignalSmooth.html
-    input:
-        x: the input signal
-        window_len: the dimension of the smoothing window;
-                    should be an odd integer
-        window: the type of window from
-                'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
-                flat window will produce a moving average smoothing.
-
+    x: the input signal
+    window_len: the dimension of the smoothing window;
+                should be an odd integer
+    window: the type of window from
+            'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
+            flat window will produce a moving average smoothing.
     output:
-        the smoothed signal
-
+        the smoothed signal
     example:
-
-    t=linspace(-2,2,0.1)
-    x=sin(t)+randn(len(t))*0.1
-    y=smooth(x)
+        t=linspace(-2,2,0.1)
+        x=sin(t)+randn(len(t))*0.1
+        y=smooth(x)
     """
     if x.ndim != 1:
         raise ValueError("smooth only accepts 1 dimension arrays.")
@@ -259,7 +255,6 @@ def value_crossing(a, v):
     second = residual[1:]
     return np.logical_and((first*second)<=0,first > 0)
 
-
 def cleanup_trace(trace,
                   T9 = 10, atol = 1,
                   p = 6, thresh = 1e-1):
@@ -267,56 +262,63 @@ def cleanup_trace(trace,
     # output trace
     trsm,trace_out = {},{}
     Tunit = cgs['MEV']/cgs['GK']
     trsm['T'],trsm['rho'] = log_smooth(trace['T']),log_smooth(trace['rho'])
 
     # starting temperature
-    #Tgk = trace['T']*cgs['MEV']/cgs['GK']
-    Tgk = trsm['T']*cgs['MEV']/cgs['GK']
+    Tgk = trsm['T']*Tunit
     TgkT9 = value_crossing(Tgk, T9)
 
-    try:
-        sidx = np.where(TgkT9)[0][-1] + 1
-        # Sometimes the temperature is not actually that close to T9=10,
-        # so this will do a little interpolation to find a suitable
-        # starting point.
-        if T9 > Tgk[sidx:][0] > T9-atol or Tgk[0] < T9:
-            if Tgk[0] < T9:
-                print("WARNING: Tracer {} starting at temperature {} < T9 = {}".format(trace['id'][0], Tgk[0], T9))
-            sidx = 0
-            for k in trace.keys() - ['T','rho']:
-                trace_out[k] = trace[k][sidx:]
-            trace_out['T'],trace_out['rho'] = trsm['T'][sidx:],trsm['rho'][sidx:]
-        else:
-            interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],Tgk[sidx-1:sidx+1])
-            residual = lambda t: interpsidx(t) - T9
-            root_results = optimize.root_scalar(residual, bracket=(trace['time'][sidx-1],trace['time'][sidx+1]))
-            if not root_results.converged:
-                raise ValueError("Root finding failed for tracer {}".format(trace['id'][0]))
-            new_time = root_results.root
-            new_val = interpsidx(new_time)
-            trace_out['T'] = np.insert(trsm['T'][sidx:],0,new_val/Tunit)
-            trace_out['time'] = np.insert(trace['time'][sidx:],0,new_time)
-            trace_out['time'] = np.insert(trace['time'][sidx:],0,newx[ind])
-            # Still need to get corresponding other key values
-            for k in trace.keys() - ['time','T']:
-                trace_in = trsm if k in trsm.keys() else trace
-                interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trace_in[k][sidx-1:sidx+1],axis=0)
-                trace_out[k] = np.insert(trace_in[k][sidx:],0,interpsidx(new_time),axis=0)
-    except (ValueError, IndexError) as error:
-        print("WARNING: Tracer {} starting at temperature {} < T9 = {}".format(trace['id'][0], Tgk[0], T9))
+    # Need to account for possibility that there is no T9 crossing point
+    # in which case TgkT9 will just be an array full of False values
+    if len(np.where(TgkT9)[0]) == 0:
         sidx = 0
-        for k in trace.keys() - ['T','rho']:
-            trace_out[k] = trace[k][sidx:]
-        trace_out['T'],trace_out['rho'] = trsm['T'][sidx:],trsm['rho'][sidx:]
-
+        print('No T > T9 = ',T9,' exists. Starting at T = ',Tgk[sidx])
+    else:
+        sidx = np.where(TgkT9)[0][-1] + 1
+        print('Initial is < T9 of',T9,'; Starting at crossing point T = ',Tgk[sidx])
+
+    # Sometimes the temperature is not actually that close to T9=10,
+    # so this will do a little interpolation to find a suitable
+    # starting point.
+    if T9 > Tgk[sidx:][0] > T9-atol or Tgk[0] < T9:
+        print(Tgk[sidx:][0])
+        for k in trace.keys() - ['T','rho']:
+            trace_out[k] = trace[k][sidx:]
+        trace_out['T'],trace_out['rho'] = trsm['T'][sidx:],trsm['rho'][sidx:]
+    else:
+        interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],Tgk[sidx-1:sidx+1])
+        newx = np.arange(trace['time'][sidx-1],trace['time'][sidx],0.1)
+        tgk_interp = interpsidx(newx)
+        ind = np.where(np.isclose(tgk_interp,T9,atol=0.1))[0][-1]
+        trace_out['T'] = np.insert(trsm['T'][sidx:],0,tgk_interp[ind]/Tunit)
+        trace_out['time'] = np.insert(trace['time'][sidx:],0,newx[ind])
+        # Still need to get corresponding other key values
+        for k in trace.keys() - ['time','T']:
+            if trace[k].size == len(trace):
+                if k == 'rho':
+                    interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trsm[k][sidx-1:sidx+1])
+                    interp = interpsidx(newx)
+                    trace_out[k] = np.insert(trsm[k][sidx:],0,interp[ind])
+                else:
+                    interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trace[k][sidx-1:sidx+1])
+                    interp = interpsidx(newx)
+                    trace_out[k] = np.insert(trace[k][sidx:],0,interp[ind])
+            else:
+                dim = int(trace[k].size/len(trace))
+                temp,trace_out[k] = {},[]
+                for i in range(dim):
+                    interpsidx = interpolate.interp1d(trace['time'][sidx-1:sidx+1],trace[k][:,i][sidx-1:sidx+1])
+                    interp = interpsidx(newx)
+                    temp[i] = np.insert(trace[k][sidx:][:,i],0,interp[ind])
+                    trace_out[k].append(temp[i])
+                trace_out[k] = np.array(trace_out[k])
+                if trace_out[k].shape[0] == dim:
+                    trace_out[k] = np.reshape(trace_out[k],(trace_out[k].shape[1],trace_out[k].shape[0]))
+
     # Cut off at the end
     cutoff = get_cutoff(trace_out,p,thresh)
     for k in trace.keys():
         trace_out[k] = trace_out[k][:cutoff]
-
-    # smooth rho and T
-    trace_out['Tgk'] = trace_out['T']*cgs['MEV']/cgs['GK']
+    trace_out['Tgk'] = trace_out['T'] * Tunit
 
     # heavies don't matter
     nu_ab = (np.abs(trace_out['rate_absorbed'][:,0])
@@ -332,9 +334,7 @@ def cleanup_trace(trace,
     ye_cutoff = 0
     ye_avg = np.mean(trace_out['Ye'][ye_cutoff:])
     ye_std = np.std(trace_out['Ye'][ye_cutoff:])
-    # trace_out['Ye_avg'] = trace_out['Ye'].copy()
     trace_out['Ye_avg'] = ye_avg*np.ones_like(trace_out['Ye'])
-    # trace_out['Ye_avg'] = signal_smooth(trace_out['Ye_avg'],11)
 
     # R
     trace_out['R'] = np.sqrt(trace_out['Xcart'][:,0]**2
@@ -363,27 +363,17 @@ def to_file(trace,
                header=header,
                comments = '# ')
 
-def meta_file(idx, mass, mass_unit, T_initx, Tmax,
-              rho_initx = None,total_mass = None):
+def meta_file(idx,mass,mass_unit,T_initx,rho_initx,total_mass = None):
     if total_mass is not None:
         mass_fraction = mass / total_mass
     mass *= mass_unit
     with open('metadata.dat','w') as f:
-        if rho_initx is not None:
         if total_mass is not None:
-            f.write("# 1:id 2:mass[g] 3:mass fraction 4:T_initx[GK] 5:rho_initx[g/cm3] 6:T_max[GK]\n")
-            f.write("{} {:.6e} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction,T_initx,rho_initx, Tmax))
+            f.write("# 1:id 2:mass[g] 3:mass fraction 4:T_initx[GK] 5:rho_initx[g/cm3]\n")
+            f.write("{} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction,T_initx,rho_initx))
         else:
-            f.write("# 1:id 2:mass[g] 3:T_smooth[GK] 4:rho_initx[g/cm3] 5: T_max[GK]\n")
-            f.write("{} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,T_initx,rho_initx, Tmax))
-        else:
-        if total_mass is not None:
-            f.write("# 1:id 2:mass[g] 3:mass fraction 4:T_initx[GK] 5: T_max[GK]\n")
-            f.write("{} {:.6e} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,mass_fraction,T_initx, Tmax))
-        else:
-            f.write("# 1:id 2:mass[g] 3:T_smooth[GK] 4: T_max[GK]\n")
-            f.write("{} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,T_initx, Tmax))
-
+            f.write("# 1:id 2:mass[g] 3:T_smooth[GK] 4:rho_initx[g/cm3]\n")
+            f.write("{} {:.6e} {:.6e} {:.6e}\n".format(idx,mass,T_initx,rho_initx))
 
 def extrapolate_trace(trace,T9,atol,p,thresh,tfinal = 1e3):
     # extrapolation
@@ -427,14 +417,11 @@ def convert_file(tracer_file,outdir,
     os.makedirs(directory,exist_ok=True)
     os.chdir(directory)
     to_file(trace)
-    meta_file(trace['id'],trace['mass'],mass_unit,
-              trace['T_smooth'],trace['Tgk'].max(), trace['rho_smooth'])
+    meta_file(trace['id'],trace['mass'],mass_unit,trace['T_smooth'],trace['rho_smooth'])
     if frdm_path is not None:
         Ye = trace['Ye_avg'][0]
-        #rho = trace['rho'][0]
-        rhosmooth = trace['rho_smooth']
-        Tsmooth = trace['T_smooth']
-        #frdm_comp(Ye,T9,rho,'initx.dat',frdm_path)
+        rhosmooth = trace['rho_smooth'] # This is a single value, not an array
+        Tsmooth = trace['T_smooth'] # This is a single value, not an array
         frdm_comp(Ye,Tsmooth,rhosmooth,'initx.dat',frdm_path)
         print('initx file written.')
     os.chdir(currdir)
@@ -473,7 +460,6 @@ def convert_ids(tracers,directory,
     os.chdir(directory)
     for i,idx in enumerate(ids):
         print("\t...",i,idx)
-        trace = tracers.get_trace(idx)
         trace = extrapolate_trace(trace,T9,atol,p,thresh, tfinal)
         istr = "{:08d}".format(i)
 
@@ -481,8 +467,7 @@ def convert_ids(tracers,directory,
         os.mkdir(istr)
         os.chdir(istr)
         to_file(trace)
-        meta_file(idx,trace['mass'],mass_unit, trace['T_smooth'], trace['Tgk'].max(),
-                  trace['rho_smooth'], total_mass)
+        meta_file(idx,trace['mass'],mass_unit,total_mass)
         if frdm_path is not None:
             Ye = trace['Ye_avg'][0]
             rho = trace['rho'][0]
@@ -493,8 +478,7 @@
 def convert_path_by_time(inpath,outdir,
                          T9 = 10, atol = 0.3,
                          p = 6, thresh = 1e-1,
-                         frdm_path = None,
-                         tfinal = 1e3):
+                         frdm_path = None):
     tracers = io.TracerData.frompath(inpath)
     convert_ids(tracers,outdir,
                 T9=T9,atol=atol,
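Patch 2 trades the root find for a dense resample (np.arange with step 0.1 plus np.isclose with atol=0.1) and carries every other key along by interpolating it column by column; the trace[k].size == len(trace) test is what separates one-value-per-timestep keys from multi-column ones. Two observations, shown in a hedged sketch with toy arrays (names are illustrative): interp1d's axis argument interpolates all columns in one call, and the closing np.reshape swaps the shape but scrambles element order, so a transpose (.T) is the shape-safe way to turn the stacked (dim, ntimes) array back into (ntimes, dim).

import numpy as np
from scipy import interpolate

times = np.array([0.0, 1.0, 2.0, 3.0])
xyz   = np.column_stack([times**2, 3.0*times, np.ones_like(times)])  # (4, 3)

# one interp1d call covers all three columns, replacing the per-column loop
f    = interpolate.interp1d(times, xyz, axis=0)
newx = np.arange(times[0], times[-1], 0.1)
ind  = np.where(np.isclose(f(newx)[:, 0], 4.0, atol=0.1))[0][-1]  # same isclose idiom

row = f(newx[ind])                        # one interpolated row, shape (3,)
out = np.insert(xyz[1:], 0, row, axis=0)  # prepend it row-wise, as the patch does

# reshape vs transpose: only .T preserves column alignment
stacked = np.array([xyz[:, i] for i in range(3)])    # (3, 4), like the patch's list
assert np.array_equal(stacked.T, xyz)
assert not np.array_equal(np.reshape(stacked, (4, 3)), xyz)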
From c56d6c5682ad713ece446fd9df830854087f6521 Mon Sep 17 00:00:00 2001
From: kelslund <84802420+kelslund@users.noreply.github.com>
Date: Sun, 19 Feb 2023 16:26:43 -0500
Subject: [PATCH 3/6] Create accumulate_PRISM.py

New script to pull init data from PRISM trajectory files
---
 script/analysis/accumulate_PRISM.py | 98 +++++++++++++++++++++++++++++
 1 file changed, 98 insertions(+)
 create mode 100644 script/analysis/accumulate_PRISM.py

diff --git a/script/analysis/accumulate_PRISM.py b/script/analysis/accumulate_PRISM.py
new file mode 100644
index 0000000..f680462
--- /dev/null
+++ b/script/analysis/accumulate_PRISM.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+
+"""
+Author: Kelsey Lund but based on Jonah Miller's analysis scripts
+
+Accumulate tracer data into a single file with values that are used as inputs for PRISM calculations.
+For now, this should only be used as a post-processing step in the sense that it assumes PRISM
+trajectory files already exist.
+To do: Build this structure into the cleanup_trace function in tracers_to_PRISM so that one of the
+outputs from that whole procedure is the accumulated file.
+"""
+
+import glob
+import traceback
+import numpy as np
+import hdf5_to_dict as io
+from multiprocessing import Pool
+import units; units = units.get_cgs()
+
+def find_t_closest(input,time):
+    tlst = []
+    for t in range(len(input)):
+        tlst.append(abs(input[t] - time))
+    return tlst.index(min(tlst))
+
+def find_PRISM_start_t(trace_ID,dirt):
+    # Pulls out starting time of PRISM trajectory file (in seconds, beware!)
+    tstart = np.loadtxt(dirt + '/trace_%08d'%trace_ID + '/trajectory.dat')[0][0]
+    return tstart
+
+def get_IDs(dirt):
+    # dirt should be something like 'analysis/traces/' WITH trailing backslash(for now)
+    # dirt should be traces directory that has (usually) 100 subdirectories
+    IDs = []
+    subdirs = sorted(glob.glob(dirt + '*/'))
+    for subdir in subdirs:
+        tr_ids = glob.glob(subdir + 'trace_*.td')
+        for ID in tr_ids:
+            IDs.append(ID)
+    return IDs
+
+def loop(inputs):#tr_id,dirt):
+    try:
+        tr_id,pr_dirt = inputs # pr_dirt should be where PRISM trajectory files are- something like '/analysis/done_DND/'
+        trace = io.TracerData.fromfile(tr_id)
+        trace_times = trace['time'] * trace.units['T_unit']
+        vals = {}
+
+        start_t = find_PRISM_start_t(tr_id, prdirt)
+        index = find_t_closest(trace_times,start_t)
+
+        for key in trace.keys():
+            vals[key] = [trace[key][index]]
+        return vals
+    except:
+        traceback.print_exc()
+#---------------------------------------------------------------------------------------#
+
+parser = ArgumentParser()
+parser.add_argument('trace_dir', type = str,
+                    help = ('Directory where subdirectories containing tracer files by IDs are located.'))
+
+parser.add_argument('prism_dir', type=str,
+                    help = ('Directory where PRISM trajectory files are kept.'))
+
+parser.add_argument('accumulated',type=str,
+                    help = ('Name of file to save accumulated tracers to.'))
+
+parser.add_argument('-n','--nprocs',type=int, default = None,
+                    help=('Number of parallel procs to use.'))
+
+if __name__ == "__main__":
+    try:
+        args = parser.parse_args()
+
+        fnams = get_IDs(args.trace_dir)
+        p = Pool(args.nprocs)
+
+        inputs = []
+        vals = {}
+        for f in fnams:
+            inputs.append((f,args.prism_dir))
+        vals_out = p.map(loop,inputs)
+        for key in vals_out.keys():
+            if key not in vals.keys():
+                vals[key] = vals[key]
+            else:
+                vals[key] = np.concatenate((vals[key],vals_out[key]))
+    except:
+        traceback.print_exc()
+
+
+
+#trace_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/traces/'
+#prism_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/done_DND/'
+#nprocs = 10
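Two helpers in the new script can be tightened. find_t_closest builds a Python list just to take an argmin, which numpy does in one vectorized line, and loop returns None whenever its bare except fires, so whatever consumes Pool.map's output has to tolerate None entries. A small hedged sketch of the first point (signature kept, body swapped):

import numpy as np

def find_t_closest(input, time):
    # index of the sample nearest to `time`; same answer as the list-based loop
    return int(np.argmin(np.abs(np.asarray(input) - time)))

assert find_t_closest([0.0, 0.5, 2.0], 0.6) == 1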
From fd8e6fb9d12fc735bdbdda3ade4e9d7188cc2df6 Mon Sep 17 00:00:00 2001
From: kelslund <84802420+kelslund@users.noreply.github.com>
Date: Sun, 19 Feb 2023 16:36:49 -0500
Subject: [PATCH 4/6] Updated new script

Added save command - need to test
---
 script/analysis/accumulate_PRISM.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/script/analysis/accumulate_PRISM.py b/script/analysis/accumulate_PRISM.py
index f680462..00724db 100644
--- a/script/analysis/accumulate_PRISM.py
+++ b/script/analysis/accumulate_PRISM.py
@@ -11,6 +11,7 @@
 """
 
 import glob
+import pickle
 import traceback
 import numpy as np
 import hdf5_to_dict as io
 from multiprocessing import Pool
@@ -86,11 +87,14 @@ def loop(inputs):#tr_id,dirt):
                 vals[key] = vals[key]
             else:
                 vals[key] = np.concatenate((vals[key],vals_out[key]))
+
+        with open(args.accumulated,'wb') as f:
+            pickle.dump(vals, f)
     except:
         traceback.print_exc()
 
 
-
+    with open('test.td','wb') as f:
 #trace_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/traces/'
 #prism_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/done_DND/'
 #nprocs = 10
From 2149daad8ec899fc59d5847b1ae85a544632a9f6 Mon Sep 17 00:00:00 2001
From: kelslund <84802420+kelslund@users.noreply.github.com>
Date: Sun, 19 Feb 2023 16:47:36 -0500
Subject: [PATCH 5/6] Update accumulate_PRISM.py

---
 script/analysis/accumulate_PRISM.py | 36 +++++++++++------------------
 1 file changed, 14 insertions(+), 22 deletions(-)

diff --git a/script/analysis/accumulate_PRISM.py b/script/analysis/accumulate_PRISM.py
index 00724db..536fe38 100644
--- a/script/analysis/accumulate_PRISM.py
+++ b/script/analysis/accumulate_PRISM.py
@@ -1,14 +1,14 @@
 #!/usr/bin/env python
 
-"""
-Author: Kelsey Lund but based on Jonah Miller's analysis scripts
-
-Accumulate tracer data into a single file with values that are used as inputs for PRISM calculations.
-For now, this should only be used as a post-processing step in the sense that it assumes PRISM
-trajectory files already exist.
-To do: Build this structure into the cleanup_trace function in tracers_to_PRISM so that one of the
-outputs from that whole procedure is the accumulated file.
-"""
+#"""
+# Author: Kelsey Lund but based on Jonah Miller's analysis scripts
+#
+# Accumulate tracer data into a single file with values that are used as inputs for PRISM calculations.
+# For now, this should only be used as a post-processing step in the sense that it assumes PRISM
+# trajectory files already exist.
+# To do: Build this structure into the cleanup_trace function in tracers_to_PRISM so that one of the
+# outputs from that whole procedure is the accumulated file.
+# """
 
 import glob
 import pickle
@@ -16,6 +16,7 @@
 import numpy as np
 import hdf5_to_dict as io
 from multiprocessing import Pool
+from argparse import ArgumentParser
 import units; units = units.get_cgs()
 
 def find_t_closest(input,time):
@@ -58,17 +59,10 @@ def loop(inputs):#tr_id,dirt):
 #---------------------------------------------------------------------------------------#
 
 parser = ArgumentParser()
-parser.add_argument('trace_dir', type = str,
-                    help = ('Directory where subdirectories containing tracer files by IDs are located.'))
-
-parser.add_argument('prism_dir', type=str,
-                    help = ('Directory where PRISM trajectory files are kept.'))
-
-parser.add_argument('accumulated',type=str,
-                    help = ('Name of file to save accumulated tracers to.'))
-
-parser.add_argument('-n','--nprocs',type=int, default = None,
-                    help=('Number of parallel procs to use.'))
+parser.add_argument('trace_dir', type = str, help = ('Directory where subdirectories containing tracer files by IDs are located.'))
+parser.add_argument('prism_dir', type=str, help = ('Directory where PRISM trajectory files are kept.'))
+parser.add_argument('accumulated',type=str, help = ('Name of file to save accumulated tracers to.'))
+parser.add_argument('-n','--nprocs',type=int, default = None, help=('Number of parallel procs to use.'))
 
 if __name__ == "__main__":
     try:
@@ -93,8 +87,6 @@ def loop(inputs):#tr_id,dirt):
     except:
         traceback.print_exc()
 
-
-    with open('test.td','wb') as f:
 #trace_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/traces/'
 #prism_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/done_DND/'
 #nprocs = 10
From 908e9c29361d29f56b51c878f019a9d9a3d201d8 Mon Sep 17 00:00:00 2001
From: kelslund <84802420+kelslund@users.noreply.github.com>
Date: Sun, 19 Feb 2023 17:26:00 -0500
Subject: [PATCH 6/6] Update accumulate_PRISM.py

Fixed some workflow bugs
---
 script/analysis/accumulate_PRISM.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/script/analysis/accumulate_PRISM.py b/script/analysis/accumulate_PRISM.py
index 536fe38..8dcba56 100644
--- a/script/analysis/accumulate_PRISM.py
+++ b/script/analysis/accumulate_PRISM.py
@@ -27,6 +27,7 @@ def find_t_closest(input,time):
 
 def find_PRISM_start_t(trace_ID,dirt):
     # Pulls out starting time of PRISM trajectory file (in seconds, beware!)
+    trid = int(trace_ID.split('trace_')[-1].split('.td')[0])
     tstart = np.loadtxt(dirt + '/trace_%08d'%trace_ID + '/trajectory.dat')[0][0]
     return tstart
 
@@ -48,7 +49,7 @@ def loop(inputs):#tr_id,dirt):
         trace_times = trace['time'] * trace.units['T_unit']
         vals = {}
 
-        start_t = find_PRISM_start_t(tr_id, prdirt)
+        start_t = find_PRISM_start_t(tr_id, pr_dirt)
         index = find_t_closest(trace_times,start_t)
 
         for key in trace.keys():
@@ -92,3 +93,12 @@ def loop(inputs):#tr_id,dirt):
 #nprocs = 10
 
 
+#trace_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/traces/'
+#prism_dir = '/users/klund/scratch4/nubhlight/torus_gw/analysis/done_DND/'
+#fnams = accp.get_IDs(trace_dir)
+#from multiprocessing import Pool
+#p = Pool(10)
+#inputs,vals = [],{}
+#for f in fnams:
+#    inputs.append((f,prism_dir))
+#    vals_out = p.map(accP.loop,inputs)
\ No newline at end of file
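Even after patch 6, two workflow bugs remain visible in the final file. Pool.map returns a list of per-tracer dicts, so vals_out.keys() raises AttributeError, and the first-seen branch assigns vals[key] = vals[key], a KeyError, instead of seeding the entry from the worker's result. In find_PRISM_start_t, the new trid is parsed but the full path string trace_ID is still formatted into '%08d', which is a TypeError; trid is what belongs there. A hedged sketch of both corrections, under the same file-layout assumptions as the script (not part of the patch series itself):

import numpy as np

def find_PRISM_start_t(trace_ID, dirt):
    # parse the integer ID out of '.../trace_XXXXXXXX.td' and format the ID,
    # not the path string, into the trajectory directory name
    trid = int(trace_ID.split('trace_')[-1].split('.td')[0])
    return np.loadtxt(dirt + '/trace_%08d' % trid + '/trajectory.dat')[0][0]

def merge_results(vals_out):
    # fold Pool.map's list of {key: [value]} dicts into one dict of arrays
    vals = {}
    for res in vals_out:
        if res is None:        # a worker that raised returned None
            continue
        for key, value in res.items():
            if key not in vals:
                vals[key] = np.asarray(value)
            else:
                vals[key] = np.concatenate((vals[key], np.asarray(value)))
    return vals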