
Commit

Add time averaging script
brryan committed Dec 13, 2023
1 parent cb17583 commit 6ad19c9
Showing 3 changed files with 130 additions and 1 deletion.
1 change: 1 addition & 0 deletions scripts/python/get_torus_radial_profiles.py
@@ -116,6 +116,7 @@ def get_torus_radial_profiles(dfile):

     beta = Pg_sadw / Pm_sadw

+    # TODO(BRR) Include dump time
     profiles['x1'] = x1
     profiles['r'] = r
     profiles['Volume'] = Volume
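The new TODO notes that the dump time is not yet stored with the profiles, so the averaging script added below has to reconstruct it from the file name. A minimal sketch of what resolving the TODO might look like, assuming the reader object exposes the dump time as a `Time` attribute (an assumption, not shown in this diff):

    # Hypothetical: store the dump time alongside the other profile entries.
    # Assumes dfile.Time exists on the phoedf/phdf reader; verify before use.
    profiles['t'] = dfile.Time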
2 changes: 1 addition & 1 deletion scripts/python/phoedf.py
@@ -413,7 +413,7 @@ def Getvpcov(self):
         vpcon = self.Getvpcon()
         for ii in range(3):
             for jj in range(3):
-                vpcov[:,ii,:,:,:] += self.gcov[:, ii + 1,jj + 1,:,:,:]*vpcon[:,jj,:,:,:]
+                self.vpcov[:,ii,:,:,:] += self.gcov[:, ii + 1,jj + 1,:,:,:]*vpcon[:,jj,:,:,:]

         return self.vpcov
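This is a bug fix in `Getvpcov`: the nested loops lower the spatial index of the primitive velocity with the spatial block of the metric, and the accumulation now writes into the cached `self.vpcov` array that the method actually returns, instead of the bare name `vpcov`. For reference only, an equivalent vectorized form of the same contraction, sketched under the assumption that `gcov` and `vpcon` carry the block, index, and mesh axes used in the loop above (not part of this commit):

    import numpy as np

    # Lower the spatial index: vpcov_i = g_{ij} vpcon^j, summing over j = 1..3.
    # gcov[:, 1:4, 1:4, ...] is the spatial block of the metric with axes
    # (block, i, j, x3, x2, x1); vpcon has axes (block, j, x3, x2, x1).
    vpcov = np.einsum('bijzyx,bjzyx->bizyx', gcov[:, 1:4, 1:4, :, :, :], vpcon)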
128 changes: 128 additions & 0 deletions scripts/python/tavg_torus_radial_profiles.py
@@ -0,0 +1,128 @@
#!/usr/bin/env python

# © 2023. Triad National Security, LLC. All rights reserved. This
# program was produced under U.S. Government contract
# 89233218CNA000001 for Los Alamos National Laboratory (LANL), which
# is operated by Triad National Security, LLC for the U.S. Department
# of Energy/National Nuclear Security Administration. All rights in
# the program are reserved by Triad National Security, LLC, and the
# U.S. Department of Energy/National Nuclear Security
# Administration. The Government is granted for itself and others
# acting on its behalf a nonexclusive, paid-up, irrevocable worldwide
# license in this material to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.

import argparse
import numpy as np
import sys
import os
from phoedf import phoedf
from multiprocessing import Pool

if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="Time-average radial profiles from torus dumps")
    parser.add_argument(
        "filenames", type=str, nargs="+", help="Files to derive radial profiles for"
    )
    parser.add_argument(
        "--overwrite", action="store_true", help="Whether to overwrite existing outputs"
    )
    parser.add_argument(
        "--tmin", type=float, default=0, help="Minimum time to begin averaging"
    )
    parser.add_argument(
        "--dt", type=float, required=True, help="Time window over which to separate averages"
    )
    parser.add_argument(
        "--dt_sim", type=float, required=True, help="Dump cadence"
    )
    args = parser.parse_args()

    print("Phoebus GRMHD analysis script for time-averaging radial profiles from dumps")
    print(f" Number of files: {len(args.filenames)}")
    print(f" Overwrite? {args.overwrite}")

    tavgs = {}

    keys_to_tavg = ['r', 'F_M', 'F_M_in', 'F_M_out', 'F_Eg', 'F_Pg', 'F_Lg', 'beta', 'vconr', 'vcovr']

    for n, filename in enumerate(args.filenames):
        base_directory = os.path.dirname(filename)
        base_filename = os.path.basename(filename)
        print(f"Processing file {base_filename}")

        # Overly specific way to get time
        nfile = int(base_filename[11:19])
        tfile = args.dt_sim * nfile

        avg_idx = int(tfile / args.dt)
        avg_key = str(avg_idx)

        if avg_key not in tavgs.keys():
            tavgs[avg_key] = {}
            tavgs[avg_key]['nfiles'] = 0
            tavgs[avg_key]['tmin'] = avg_idx * args.dt
            tavgs[avg_key]['dt'] = args.dt

        data_indices = {}

        with open(filename, "r") as profile:
            lines = profile.readlines()
            for idx, line in enumerate(lines):
                line = line.strip()
                for key in keys_to_tavg:
                    if line == key:
                        data_indices[key] = idx + 1

        # Initialize data if necessary
        ndata = len(np.fromstring(lines[data_indices[keys_to_tavg[0]]].strip(), dtype=float, sep=' '))
        if tavgs[avg_key]['nfiles'] == 0:
            for key in keys_to_tavg:
                tavgs[avg_key][key] = np.zeros(ndata)

        # Sum data
        for key in keys_to_tavg:
            tavgs[avg_key][key] += np.fromstring(lines[data_indices[key]].strip(), dtype=float, sep=' ')

        # Record number of files in this bin for averaging
        tavgs[avg_key]['nfiles'] += 1

    # Normalize
    for avg_key in tavgs.keys():
        for key in keys_to_tavg:
            tavgs[avg_key][key] /= tavgs[avg_key]['nfiles']

    # Save dictionary as pickle
    import pickle
    with open(os.path.join(base_directory, 'tavgs.pickle'), 'wb') as handle:
        pickle.dump(tavgs, handle, protocol=pickle.HIGHEST_PROTOCOL)

    # Save dictionary as plaintext (json)
    import json
    import copy
    tavgs_jsonable = copy.deepcopy(tavgs)
    # Convert NumPy arrays to lists in the copy
    def convert_numpys_to_lists(d):
        for key, value in d.items():
            if isinstance(value, np.ndarray):
                d[key] = value.tolist()
            elif isinstance(value, dict):
                convert_numpys_to_lists(value)
    convert_numpys_to_lists(tavgs_jsonable)
    with open(os.path.join(base_directory, 'tavgs.json'), 'w') as handle:
        json.dump(tavgs_jsonable, handle, indent=4)

    # Visualization
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots(1,1)
    for avg_key in tavgs.keys():
        ax.plot(tavgs[avg_key]['r'], tavgs[avg_key]['F_M'])
    ax.set_xlabel('r')
    ax.set_ylabel('FM')
    ax.set_xscale('log')
    plt.savefig("F_M.png")
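The script is run on previously extracted radial-profile files and requires the `--dt` (averaging window) and `--dt_sim` (dump cadence) options; the averages are written next to the inputs as `tavgs.pickle` and `tavgs.json`, keyed by the stringified time-window index, with `tmin`, `dt`, `nfiles`, and the averaged radial arrays in each bin. A short sketch of reading the pickle back and plotting each window (the file path and plot choices are illustrative, not part of the commit):

    import pickle
    import matplotlib.pyplot as plt

    # Load the time-averaged profiles written by tavg_torus_radial_profiles.py.
    with open('tavgs.pickle', 'rb') as handle:
        tavgs = pickle.load(handle)

    # One curve per time window; each bin stores 1D radial arrays.
    for avg_key, tavg in tavgs.items():
        label = f"t = {tavg['tmin']:g} to {tavg['tmin'] + tavg['dt']:g}"
        plt.plot(tavg['r'], tavg['F_M'], label=label)
    plt.xscale('log')
    plt.xlabel('r')
    plt.ylabel('F_M')
    plt.legend()
    plt.savefig('F_M_from_pickle.png')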


