add some protection against negatives in 4th order (#309)
This now uses a mask when we convert from averages to centers. If either the density or the internal energy becomes negative, we flag the mask for that zone. We then go back and reset all of the flagged zones to the cell averages -- this drops the method to 2nd order in those zones, but it's better than having a negative density because of a steep gradient.
zingale authored Jan 26, 2025
1 parent 0c96684 commit 888a86a
Showing 3 changed files with 34 additions and 4 deletions.
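
The core of the change is a positivity fallback: compute the cell-center state from the averages, flag any zone where the resulting density or internal energy is negative, and replace the centered state in those zones with the (already positive) averages. Below is a minimal, self-contained sketch of that logic in plain NumPy; the function name `limit_centers` and the bare-array interface are illustrative only and are not part of pyro, which operates on its own `ArrayIndexer` grid data as shown in the diff.

```python
import numpy as np

def limit_centers(U_avg, U_cc, idens, ixmom, iymom, iener):
    """Reset U_cc to U_avg (a 2nd-order fallback) in zones where the
    centered density or internal energy came out negative."""
    # internal energy density from the centered conserved state
    rhoe = U_cc[..., iener] - 0.5 * (U_cc[..., ixmom]**2 +
                                     U_cc[..., iymom]**2) / U_cc[..., idens]

    # flag unphysical zones
    bad = (U_cc[..., idens] < 0.0) | (rhoe < 0.0)

    # fall back to the cell averages for every variable in flagged zones
    U_lim = U_cc.copy()
    U_lim[bad, :] = U_avg[bad, :]
    return U_lim
```

Dropping a flagged zone back to its average locally reduces the reconstruction to 2nd order, which is the trade-off described in the commit message.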
24 changes: 24 additions & 0 deletions pyro/compressible_fv4/fluxes.py
@@ -57,6 +57,21 @@ def fluxes(myd, rp, ivars):
U_cc[:, :, ivars.iymom] = myd.to_centers("y-momentum")
U_cc[:, :, ivars.iener] = myd.to_centers("energy")

# the mask will be 1 in any zone where the density or energy
# is unphysical due to the conversion from averages to centers

rhoe = U_cc[..., ivars.iener] - 0.5 * (U_cc[..., ivars.ixmom]**2 +
U_cc[..., ivars.iymom]**2) / U_cc[..., ivars.idens]

mask = myg.scratch_array(dtype=np.uint8)
mask[:, :] = np.where(np.logical_or(U_cc[:, :, ivars.idens] < 0, rhoe < 0),
1, 0)

U_cc[..., ivars.idens] = np.where(mask == 1, U_avg[..., ivars.idens], U_cc[..., ivars.idens])
U_cc[..., ivars.ixmom] = np.where(mask == 1, U_avg[..., ivars.ixmom], U_cc[..., ivars.ixmom])
U_cc[..., ivars.iymom] = np.where(mask == 1, U_avg[..., ivars.iymom], U_cc[..., ivars.iymom])
U_cc[..., ivars.iener] = np.where(mask == 1, U_avg[..., ivars.iener], U_cc[..., ivars.iener])

# compute the primitive variables of both the cell-center and averages
q_bar = comp.cons_to_prim(U_avg, gamma, ivars, myd.grid)
q_cc = comp.cons_to_prim(U_cc, gamma, ivars, myd.grid)
@@ -66,6 +81,15 @@ def fluxes(myd, rp, ivars):
for n in range(ivars.nq):
q_avg.v(n=n, buf=3)[:, :] = q_cc.v(n=n, buf=3) + myg.dx**2/24.0 * q_bar.lap(n=n, buf=3)

# enforce positivity
q_avg.v(n=ivars.irho, buf=3)[:, :] = np.where(q_avg.v(n=ivars.irho, buf=3) > 0,
q_avg.v(n=ivars.irho, buf=3),
q_cc.v(n=ivars.irho, buf=3))

q_avg.v(n=ivars.ip, buf=3)[:, :] = np.where(q_avg.v(n=ivars.ip, buf=3) > 0,
q_avg.v(n=ivars.ip, buf=3),
q_cc.v(n=ivars.ip, buf=3))

# flattening -- there is a single flattening coefficient (xi) for all directions
use_flattening = rp.get_param("compressible.use_flattening")

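
The hunks above rely on the 4th-order relation between a cell average and the cell-center value, `q_avg ~ q_c + dx**2/24 * Lap(q)`, and its inverse, which is also what `to_centers()` applies below. The following standalone check (not pyro code, and assuming a smooth periodic 1-D profile) shows that applying the Laplacian correction to exact cell averages recovers the center values to roughly 4th-order accuracy:

```python
import numpy as np

nx = 64
dx = 2.0 * np.pi / nx
x = (np.arange(nx) + 0.5) * dx          # cell centers on [0, 2*pi)

# exact cell averages of sin(x) over each cell [x - dx/2, x + dx/2]
a_avg = (np.cos(x - 0.5 * dx) - np.cos(x + 0.5 * dx)) / dx

# 2nd-order Laplacian of the averages (periodic, so np.roll suffices)
lap = (np.roll(a_avg, 1) - 2.0 * a_avg + np.roll(a_avg, -1)) / dx**2

# average -> center conversion, as in to_centers() and the q_avg update above
a_center = a_avg - dx**2 / 24.0 * lap

print(np.max(np.abs(a_center - np.sin(x))))   # small (roughly 4th-order) error
```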
8 changes: 7 additions & 1 deletion pyro/mesh/fv.py
@@ -3,6 +3,8 @@
"""

import numpy as np

from pyro.mesh.patch import CellCenterData2d


@@ -13,14 +15,18 @@ class FV2d(CellCenterData2d):
"""

def to_centers(self, name):
def to_centers(self, name, is_positive=False):
""" convert variable name from an average to cell-centers """

a = self.get_var(name)
c = self.grid.scratch_array()
ng = self.grid.ng
c[:, :] = a[:, :]
c.v(buf=ng-1)[:, :] = a.v(buf=ng-1) - self.grid.dx**2*a.lap(buf=ng-1)/24.0

if is_positive:
c.v(buf=ng-1)[:, :] = np.where(c.v(buf=ng-1) >= 0.0, c.v(buf=ng-1), a.v(buf=ng-1))

return c

def from_centers(self, name):
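
A hedged usage sketch for the new keyword: given an `FV2d` dataset `myd` with a registered "density" variable (both names are assumptions for illustration, not taken from this diff), a caller can ask the conversion to fall back to the average wherever the centered value would go negative:

```python
# assumes `myd` is an FV2d dataset with a "density" variable registered
rho_cc = myd.to_centers("density", is_positive=True)
```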
6 changes: 3 additions & 3 deletions pyro/mesh/patch.py
@@ -146,15 +146,15 @@ def __init__(self, nx, ny, *, ng=1,
self.xr2d = ArrayIndexer(d=xr2d, grid=self)
self.yr2d = ArrayIndexer(d=yr2d, grid=self)

def scratch_array(self, *, nvar=1):
def scratch_array(self, *, nvar=1, dtype=np.float64):
"""
return a standard numpy array dimensioned to have the size
and number of ghostcells as the parent grid
"""
if nvar == 1:
_tmp = np.zeros((self.qx, self.qy), dtype=np.float64)
_tmp = np.zeros((self.qx, self.qy), dtype=dtype)
else:
_tmp = np.zeros((self.qx, self.qy, nvar), dtype=np.float64)
_tmp = np.zeros((self.qx, self.qy, nvar), dtype=dtype)
return ArrayIndexer(d=_tmp, grid=self)

def coarse_like(self, N):
