Commit ef746cfa authored by mashun1's avatar mashun1
Browse files

veros

parents
Pipeline #1302 canceled with stages
from veros.core import thermodynamics
from veros.pyom_compat import get_random_state
from test_base import compare_state
# Shared model configuration for the thermodynamics test: a 70x60x50 grid
# with 1-hour time steps and a broad set of physics / advection options
# enabled, so that the Veros and pyOM thermodynamics code paths are
# exercised as fully as possible.
TEST_SETTINGS = dict(
    nx=70,
    ny=60,
    nz=50,
    dt_tracer=3600,
    dt_mom=3600,
    enable_cyclic_x=True,
    enable_conserve_energy=True,
    enable_hor_friction_cos_scaling=True,
    enable_tempsalt_sources=True,
    enable_hor_diffusion=True,
    enable_superbee_advection=True,
    enable_tke=True,
    enable_biharmonic_mixing=True,
    enable_neutral_diffusion=True,
    enable_skew_diffusion=True,
    enable_TEM_friction=True,
    eq_of_state_type=1,
)
def prepare_inputs(vs_state, pyom_obj):
    """Mask the dissipation input fields of both models with ``maskT``.

    Applied to ``P_diss_sources`` and ``P_diss_hmix`` on the Veros state and
    on the pyOM object (which stores the same fields under lower-case names).
    Returns the modified ``(vs_state, pyom_obj)`` pair.
    """
    # implementations are only identical if non-water values are 0
    vs = vs_state.variables
    for var in (
        "P_diss_sources",
        "P_diss_hmix",
    ):
        # pyOM side: mask in-place
        getattr(pyom_obj.main_module, var.lower())[...] *= vs.maskT
        # Veros side: variables are locked; assign through unlock()
        with vs.unlock():
            setattr(vs, var, vs.get(var) * vs.maskT)
    return vs_state, pyom_obj
def test_thermodynamics(pyom2_lib):
    """Run one thermodynamics step in Veros and pyOM and compare the states."""
    state, pyom = get_random_state(pyom2_lib, extra_settings=TEST_SETTINGS)
    state, pyom = prepare_inputs(state, pyom)
    state.variables.update(thermodynamics.thermodynamics(state))
    pyom.thermodynamics()
    compare_state(state, pyom)
import pytest
from veros.core import tke
from veros.pyom_compat import get_random_state
from test_base import compare_state
# Shared model configuration for the TKE tests: 70x60x50 grid, 1-hour time
# steps, with IDEMIX/EKE and the various TKE advection / diffusion options
# enabled (bottom-friction TKE storage deliberately left off).
TEST_SETTINGS = dict(
    nx=70,
    ny=60,
    nz=50,
    dt_tracer=3600,
    dt_mom=3600,
    enable_cyclic_x=True,
    enable_idemix=True,
    tke_mxl_choice=2,
    enable_tke=True,
    enable_eke=True,
    enable_store_cabbeling_heat=True,
    enable_store_bottom_friction_tke=False,
    enable_tke_hor_diffusion=True,
    enable_tke_superbee_advection=True,
    enable_tke_upwind_advection=True,
)
# Parametrizations: toggle TKE on/off for the diffusivity test, and every
# EKE x IDEMIX combination for the integration test.
PROBLEM_SETS_SET_DIFF = {
    "tke": dict(enable_tke=True),
    "no-tke": dict(enable_tke=False),
}
PROBLEM_SETS_INTEGRATE = {
    "eke+idemix": dict(enable_eke=True, enable_idemix=True),
    "no-eke+idemix": dict(enable_eke=False, enable_idemix=True),
    "eke+no-idemix": dict(enable_eke=True, enable_idemix=False),
    "no-eke+no-idemix": dict(enable_eke=False, enable_idemix=False),
}
@pytest.mark.parametrize("problem_set", PROBLEM_SETS_SET_DIFF)
def test_set_tke_diffusivities(pyom2_lib, problem_set):
    """Compare set_tke_diffusivities between Veros and pyOM for each problem set."""
    extra = dict(TEST_SETTINGS)
    extra.update(PROBLEM_SETS_SET_DIFF[problem_set])
    state, pyom = get_random_state(pyom2_lib, extra_settings=extra)
    state.variables.update(tke.set_tke_diffusivities(state))
    pyom.set_tke_diffusivities()
    compare_state(state, pyom)
@pytest.mark.parametrize("problem_set", PROBLEM_SETS_INTEGRATE)
def test_integrate_tke(pyom2_lib, problem_set):
    """Compare one TKE integration step between Veros and pyOM for each problem set."""
    extra = dict(TEST_SETTINGS)
    extra.update(PROBLEM_SETS_INTEGRATE[problem_set])
    state, pyom = get_random_state(pyom2_lib, extra_settings=extra)
    state.variables.update(tke.integrate_tke(state))
    pyom.integrate_tke()
    compare_state(state, pyom)
import pytest
import numpy as np
from veros import runtime_settings
from veros.pyom_compat import load_pyom
@pytest.mark.skipif(runtime_settings.backend != "jax", reason="Must use JAX backend")
@pytest.mark.parametrize("use_ext", [True, False])
def test_solve_tridiag_jax(pyom2_lib, use_ext):
    """Compare the JAX tridiagonal solver against the pyOM reference solver,
    with and without the optional native TDMA extension."""
    from veros.core.operators import solve_tridiagonal_jax
    from veros.core.utilities import create_water_masks

    pyom_obj = load_pyom(pyom2_lib)
    nx, ny, nz = 70, 60, 50
    a, b, c, d = (np.random.randn(nx, ny, nz) for _ in range(4))
    kbot = np.random.randint(0, nz, size=(nx, ny))
    out_pyom = np.zeros((nx, ny, nz))
    for i in range(nx):
        for j in range(ny):
            # pyOM solves each water column separately from the bottom cell
            # (kbot, 1-based) up to the surface; kbot == 0 marks land columns
            ks = kbot[i, j] - 1
            ke = nz
            if ks < 0:
                continue
            out_pyom[i, j, ks:ke] = pyom_obj.solve_tridiag(
                a=a[i, j, ks:ke], b=b[i, j, ks:ke], c=c[i, j, ks:ke], d=d[i, j, ks:ke], n=ke - ks
            )
    _, water_mask, edge_mask = create_water_masks(kbot, nz)
    # runtime_settings is frozen, so toggle the extension via object.__setattr__
    object.__setattr__(runtime_settings, "use_special_tdma", use_ext)
    out_vs = solve_tridiagonal_jax(a, b, c, d, water_mask, edge_mask)
    np.testing.assert_allclose(out_pyom, out_vs)
@pytest.mark.skipif(runtime_settings.backend != "numpy", reason="Must use NumPy backend")
def test_solve_tridiag_numpy(pyom2_lib):
    """Compare the NumPy tridiagonal solver against the pyOM reference solver."""
    from veros.core.operators import solve_tridiagonal_numpy
    from veros.core.utilities import create_water_masks

    pyom_obj = load_pyom(pyom2_lib)
    nx, ny, nz = 70, 60, 50
    a, b, c, d = (np.random.randn(nx, ny, nz) for _ in range(4))
    kbot = np.random.randint(0, nz, size=(nx, ny))
    out_pyom = np.zeros((nx, ny, nz))
    for i in range(nx):
        for j in range(ny):
            # pyOM solves each water column separately from the bottom cell
            # (kbot, 1-based) up to the surface; kbot == 0 marks land columns
            ks = kbot[i, j] - 1
            ke = nz
            if ks < 0:
                continue
            out_pyom[i, j, ks:ke] = pyom_obj.solve_tridiag(
                a=a[i, j, ks:ke], b=b[i, j, ks:ke], c=c[i, j, ks:ke], d=d[i, j, ks:ke], n=ke - ks
            )
    _, water_mask, edge_mask = create_water_masks(kbot, nz)
    out_vs = solve_tridiagonal_numpy(a, b, c, d, water_mask, edge_mask)
    np.testing.assert_allclose(out_pyom, out_vs)
import os
import numpy as np
from veros import veros_routine
from veros.setups.acc import ACCSetup
def _normalize(*arrays):
if any(a.size == 0 for a in arrays):
return arrays
norm = np.abs(arrays[0]).max()
if norm == 0.0:
return arrays
return (a / norm for a in arrays)
class RestartSetup(ACCSetup):
    """ACC setup variant that samples every diagnostic each tracer time step
    but never writes regular diagnostic output (output_frequency = inf)."""

    @veros_routine
    def set_diagnostics(self, state):
        for diag in state.diagnostics.values():
            diag.sampling_frequency = state.settings.dt_tracer
            diag.output_frequency = float("inf")
def test_restart(tmpdir):
    """Check that resuming from a restart file matches an uninterrupted run.

    Runs the setup for 5 steps writing a restart file, resumes from that file
    for another 5 steps, and compares settings, variables and diagnostic
    variables against a single continuous 10-step run.
    """
    os.chdir(tmpdir)
    timesteps_1 = 5
    timesteps_2 = 5
    dt_tracer = 86_400 / 2
    restart_file = "restart.h5"
    # first leg: run 5 steps and write the restart file
    acc_no_restart = RestartSetup(
        override=dict(
            identifier="ACC_no_restart",
            restart_input_filename=None,
            restart_output_filename=restart_file,
            dt_tracer=dt_tracer,
            runlen=timesteps_1 * dt_tracer,
        )
    )
    acc_no_restart.setup()
    acc_no_restart.run()
    # second leg: resume from the restart file for another 5 steps
    acc_restart = RestartSetup(
        override=dict(
            identifier="ACC_restart",
            restart_input_filename=restart_file,
            restart_output_filename=None,
            dt_tracer=dt_tracer,
            runlen=timesteps_2 * dt_tracer,
        )
    )
    acc_restart.setup()
    acc_restart.run()
    # reference: continue the first run for 5 more steps without restarting
    with acc_no_restart.state.settings.unlock():
        acc_no_restart.state.settings.runlen = timesteps_2 * dt_tracer
    acc_no_restart.run()
    state_1, state_2 = acc_restart.state, acc_no_restart.state
    # all settings except run metadata must agree
    for setting in state_1.settings.fields():
        if setting in ("identifier", "restart_input_filename", "restart_output_filename", "runlen"):
            continue
        s1 = state_1.settings.get(setting)
        s2 = state_2.settings.get(setting)
        assert s1 == s2

    def check_var(var):
        v1 = state_1.variables.get(var)
        v2 = state_2.variables.get(var)
        np.testing.assert_allclose(*_normalize(v1, v2), atol=1e-10, rtol=0)

    for var in state_1.variables.fields():
        if var in ("itt",):
            continue
        # salt is not used by this setup, contains only numerical noise
        if "salt" in var:
            continue
        check_var(var)

    def check_diag_var(diag, var):
        v1 = state_1.diagnostics[diag].variables.get(var)
        v2 = state_2.diagnostics[diag].variables.get(var)
        np.testing.assert_allclose(*_normalize(v1, v2), atol=1e-10, rtol=0)

    for diag in state_1.diagnostics:
        # some diagnostics carry no variables of their own
        if getattr(state_1.diagnostics[diag], "variables", None) is None:
            continue
        for var in state_1.diagnostics[diag].variables.fields():
            if var in ("itt",):
                continue
            check_diag_var(diag, var)
import pytest
@pytest.fixture(autouse=True)
def set_options():
    """Enable diskless mode for every test in this module, restoring it afterwards."""
    from veros import runtime_settings

    # runtime_settings is frozen; bypass __setattr__ to toggle the flag
    object.__setattr__(runtime_settings, "diskless_mode", True)
    try:
        yield
    finally:
        object.__setattr__(runtime_settings, "diskless_mode", False)
@pytest.mark.parametrize("float_type", ("float32", "float64"))
def test_setup_acc(float_type):
    """Set up and briefly run the ACC setup under both float precisions."""
    from veros import runtime_settings

    # runtime_settings is frozen; bypass __setattr__ to switch precision
    object.__setattr__(runtime_settings, "float_type", float_type)
    from veros.setups.acc import ACCSetup

    setup = ACCSetup()
    setup.setup()
    settings = setup.state.settings
    with settings.unlock():
        settings.runlen = settings.dt_tracer * 20
    setup.run()
def test_setup_acc_basic():
    """Set up and briefly run the basic ACC setup."""
    from veros.setups.acc_basic import ACCBasicSetup

    setup = ACCBasicSetup()
    setup.setup()
    settings = setup.state.settings
    with settings.unlock():
        settings.runlen = settings.dt_tracer * 20
    setup.run()
def test_setup_4deg():
    """Set up and briefly run the 4-degree global setup."""
    from veros.setups.global_4deg import GlobalFourDegreeSetup

    setup = GlobalFourDegreeSetup()
    setup.setup()
    settings = setup.state.settings
    with settings.unlock():
        settings.runlen = settings.dt_tracer * 20
    setup.run()
def test_setup_flexible():
    """Set up and briefly run a shrunken flexible-resolution global setup."""
    from veros.setups.global_flexible import GlobalFlexibleResolutionSetup

    overrides = dict(nx=100, ny=50, dt_tracer=3600, dt_mom=3600)
    setup = GlobalFlexibleResolutionSetup(override=overrides)
    setup.setup()
    settings = setup.state.settings
    with settings.unlock():
        settings.runlen = settings.dt_tracer * 20
    setup.run()
def test_setup_1deg():
    """Only instantiate the 1-degree global setup; running it is too expensive."""
    from veros.setups.global_1deg import GlobalOneDegreeSetup

    # too big to test
    GlobalOneDegreeSetup()
def test_setup_north_atlantic():
    """Set up and run a single step of a shrunken North Atlantic setup."""
    from veros.setups.north_atlantic import NorthAtlanticSetup

    setup = NorthAtlanticSetup(override=dict(nx=100, ny=100, nz=50))
    setup.setup()
    settings = setup.state.settings
    with settings.unlock():
        settings.runlen = settings.dt_tracer
    setup.run()
import pytest
from veros.state import VerosSettings, VerosVariables, VerosState
@pytest.fixture
def dummy_state():
    """A fresh VerosState whose variables have not been initialized yet."""
    from veros.variables import VARIABLES, DIM_TO_SHAPE_VAR
    from veros.settings import SETTINGS

    return VerosState(VARIABLES, SETTINGS, DIM_TO_SHAPE_VAR)
@pytest.fixture
def dummy_settings():
    """A fresh VerosSettings instance with default values."""
    from veros.settings import SETTINGS

    return VerosSettings(SETTINGS)
@pytest.fixture
def dummy_variables():
    """The variables container of a freshly initialized VerosState."""
    from veros.settings import SETTINGS
    from veros.variables import DIM_TO_SHAPE_VAR, VARIABLES

    state = VerosState(VARIABLES, SETTINGS, DIM_TO_SHAPE_VAR)
    state.initialize_variables()
    return state.variables
def test_lock_settings(dummy_settings):
    """Settings reject assignment while locked and accept it inside unlock()."""
    orig_val = dummy_settings.dt_tracer
    with pytest.raises(RuntimeError):
        dummy_settings.dt_tracer = 0
    # failed assignment must not have changed the value
    assert dummy_settings.dt_tracer == orig_val
    with dummy_settings.unlock():
        dummy_settings.dt_tracer = 1
    assert dummy_settings.dt_tracer == 1
def test_settings_repr(dummy_settings):
    """repr() of settings lists each setting with its (float-coerced) value."""
    with dummy_settings.unlock():
        dummy_settings.dt_tracer = 1
    assert "dt_tracer = 1.0," in repr(dummy_settings)
def test_variables_repr(dummy_variables):
    """repr() of variables shows the backend array type, shape and dtype."""
    from veros.core.operators import numpy as npx

    # the active backend (NumPy or JAX) determines the concrete array type
    array_type = type(npx.array([]))
    assert f"tau = {array_type} with shape (), dtype int32," in repr(dummy_variables)
def test_to_xarray(dummy_state):
    """to_xarray() exposes settings as attrs, dimensions as coords, variables as data."""
    pytest.importorskip("xarray")
    dummy_state.initialize_variables()
    ds = dummy_state.to_xarray()
    # settings
    assert tuple(ds.attrs.keys()) == tuple(dummy_state.settings.fields())
    assert tuple(ds.attrs.values()) == tuple(dummy_state.settings.values())
    # dimensions: collect every dim used by an actually-present variable
    used_dims = set()
    for var, meta in dummy_state.var_meta.items():
        if var in dummy_state.variables:
            if meta.dims is None:
                continue
            used_dims |= set(meta.dims)
    assert set(ds.coords.keys()) == used_dims
    for dim in used_dims:
        assert int(ds.dims[dim]) == dummy_state.dimensions[dim]
    # variables
    for var in dummy_state.variables.fields():
        assert var in ds
def test_variable_init(dummy_state):
    """Variables are inaccessible before initialize_variables() and can only be initialized once."""
    with pytest.raises(RuntimeError):
        dummy_state.variables
    dummy_state.initialize_variables()
    assert isinstance(dummy_state.variables, VerosVariables)
    # double initialization is an error
    with pytest.raises(RuntimeError):
        dummy_state.initialize_variables()
def test_set_dimension(dummy_state):
    """Dimensions derived from settings are read-only; ad-hoc ones are writable."""
    with dummy_state.settings.unlock():
        dummy_state.settings.nx = 10
    assert dummy_state.dimensions["xt"] == 10
    dummy_state.dimensions["foobar"] = 42
    assert dummy_state.dimensions["foobar"] == 42
    # "xt" is bound to the "nx" setting and must not be set directly
    with pytest.raises(RuntimeError):
        dummy_state.dimensions["xt"] = 11
    assert dummy_state._dimensions["xt"] == "nx"
def test_resize_dimension(dummy_state):
    """resize_dimension() grows both the dimension and the variables using it."""
    from veros.state import resize_dimension

    with dummy_state.settings.unlock():
        dummy_state.settings.nx = 10
    dummy_state.initialize_variables()
    assert dummy_state.dimensions["xt"] == 10
    # variable shapes carry 4 extra boundary cells (nx + 4 == 14)
    assert dummy_state.variables.dxt.shape == (14,)
    resize_dimension(dummy_state, "xt", 100)
    assert dummy_state.dimensions["xt"] == 100
    assert dummy_state.variables.dxt.shape == (104,)
def test_timers(dummy_state):
    """Accessing an unknown timer key creates a Timer on the fly."""
    from veros.timer import Timer

    timer = dummy_state.timers["foobar"]
    assert isinstance(timer, Timer)
This source diff could not be displayed because it is too large. You can view the blob instead.
diff --git a/for_src/idemix/idemix.f90 b/for_src/idemix/idemix.f90
index 21f9775..60c6070 100644
--- a/for_src/idemix/idemix.f90
+++ b/for_src/idemix/idemix.f90
@@ -143,7 +143,7 @@ subroutine integrate_idemix
(v0(i+1,j,:)*E_iw(i+1,j,:,tau)-v0(i,j,:)*E_iw(i,j,:,tau))/(cost(j)*dxu(i))*maskU(i,j,:)
enddo
enddo
- flux_east(ie_pe-onx,:,:)=0.d0
+ flux_east(ie_pe+onx,:,:)=0.d0
do j=js_pe-onx,je_pe+onx-1
flux_north(:,j,:)= tau_h*0.5d0*(v0(:,j+1,:)+v0(:,j,:)) * &
(v0(:,j+1,:)*E_iw(:,j+1,:,tau)-v0(:,j,:)*E_iw(:,j,:,tau))/dyu(j)*maskV(:,j,:)*cosu(j)
@@ -197,11 +197,11 @@ function gofx2(x)
! a function g(x)
!=======================================================================
implicit none
- real*8 :: gofx2,x,c
+ real*8 :: gofx2,x,fxa,c
real*8, parameter :: pi = 3.14159265358979323846264338327950588d0
- x=max(3d0,x)
- c= 1.d0-(2.d0/pi)*asin(1.d0/x)
- gofx2 = 2/pi/c*0.9d0*x**(-2.d0/3.d0)*(1-exp(-x/4.3d0))
+ fxa=max(3d0,x)
+ c= 1.d0-(2.d0/pi)*asin(1.d0/fxa)
+ gofx2 = 2/pi/c*0.9d0*fxa**(-2.d0/3.d0)*(1-exp(-fxa/4.3d0))
end function gofx2
function hofx1(x)
diff --git a/for_src/isoneutral/isoneutral_diffusion.f90 b/for_src/isoneutral/isoneutral_diffusion.f90
index 2ccf689..3c52400 100644
--- a/for_src/isoneutral/isoneutral_diffusion.f90
+++ b/for_src/isoneutral/isoneutral_diffusion.f90
@@ -23,6 +23,8 @@ subroutine isoneutral_diffusion(is_,ie_,js_,je_,nz_,tr,istemp)
real*8 :: bloc(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz)
real*8 :: fxa,diffloc
+ aloc = 0
+
!-----------------------------------------------------------------------
! construct total isoneutral tracer flux at east face of "T" cells
!-----------------------------------------------------------------------
@@ -161,6 +163,7 @@ if (enable_conserve_energy) then
bloc(:,:,:) = int_drhodS(:,:,:,tau)
endif
+ aloc = 0
do k=1,nz
do j=js_pe-onx+1,je_pe+onx-1
do i=is_pe-onx+1,ie_pe+onx-1
@@ -241,6 +244,8 @@ subroutine isoneutral_skew_diffusion(is_,ie_,js_,je_,nz_,tr,istemp)
real*8 :: bloc(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz)
real*8 :: fxa,diffloc
+ aloc = 0
+
!-----------------------------------------------------------------------
! construct total isoneutral tracer flux at east face of "T" cells
!-----------------------------------------------------------------------
@@ -341,6 +346,7 @@ if (enable_conserve_energy) then
bloc(:,:,:) = int_drhodS(:,:,:,tau)
endif
+ aloc = 0
do k=1,nz
do j=js_pe-onx+1,je_pe+onx-1
do i=is_pe-onx+1,ie_pe+onx-1
@@ -555,6 +561,7 @@ if (enable_conserve_energy) then
bloc(:,:,:) = int_drhodS(:,:,:,tau)
endif
+ aloc = 0
do k=1,nz
do j=js_pe-onx+1,je_pe+onx-1
do i=is_pe-onx+1,ie_pe+onx-1
diff --git a/for_src/isoneutral/isoneutral_friction.f90 b/for_src/isoneutral/isoneutral_friction.f90
index 695008d..095dc09 100644
--- a/for_src/isoneutral/isoneutral_friction.f90
+++ b/for_src/isoneutral/isoneutral_friction.f90
@@ -15,6 +15,8 @@ subroutine isoneutral_friction
real*8 :: diss(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz)
real*8 :: aloc(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz)
+ diss = 0
+
if (enable_implicit_vert_friction) then
aloc=u(:,:,:,taup1)
else
diff --git a/for_src/main/diffusion.f90 b/for_src/main/diffusion.f90
index 378fd77..ab51b00 100644
--- a/for_src/main/diffusion.f90
+++ b/for_src/main/diffusion.f90
@@ -11,7 +11,7 @@ subroutine tempsalt_biharmonic
implicit none
integer :: i,j,k,ks,is,ie,js,je
real*8 :: aloc(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz)
- real*8 :: del2(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz),fxa
+ real*8 :: del2(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz),fxa,fxb
is = is_pe-onx; ie = ie_pe+onx; js = js_pe-onx; je = je_pe+onx
fxa = sqrt(abs(K_hbi))
@@ -56,14 +56,15 @@ subroutine tempsalt_biharmonic
if (enable_conserve_energy) then
! diagnose dissipation of dynamic enthalpy by hor. mixing of temperature
+ aloc = 0
do k=1,nz
- do j=js_pe,je_pe
- do i=is_pe,ie_pe
- fxa = int_drhodT(i,j,k,tau)
- aloc(i,j,k) =+0.5d0*grav/rho_0*( (int_drhodT(i+1,j,k,tau)-fxa)*flux_east(i ,j,k) &
- +(fxa-int_drhodT(i-1,j,k,tau))*flux_east(i-1,j,k) ) /(dxt(i)*cost(j)) &
- +0.5d0*grav/rho_0*( (int_drhodT(i,j+1,k,tau)-fxa)*flux_north(i,j ,k) &
- +(fxa-int_drhodT(i,j-1,k,tau))*flux_north(i,j-1,k) ) /(dyt(j)*cost(j))
+ do j=js_pe-onx+1,je_pe+onx-1
+ do i=is_pe-onx+1,ie_pe+onx-1
+ fxb = int_drhodT(i,j,k,tau)
+ aloc(i,j,k) =+0.5d0*grav/rho_0*( (int_drhodT(i+1,j,k,tau)-fxb)*flux_east(i ,j,k) &
+ +(fxb-int_drhodT(i-1,j,k,tau))*flux_east(i-1,j,k) ) /(dxt(i)*cost(j)) &
+ +0.5d0*grav/rho_0*( (int_drhodT(i,j+1,k,tau)-fxb)*flux_north(i,j ,k) &
+ +(fxb-int_drhodT(i,j-1,k,tau))*flux_north(i,j-1,k) ) /(dyt(j)*cost(j))
enddo
enddo
end do
@@ -125,14 +126,15 @@ endif
if (enable_conserve_energy) then
! diagnose dissipation of dynamic enthalpy by hor. mixing of salinity
+ aloc = 0
do k=1,nz
do j=js_pe-onx+1,je_pe+onx-1
do i=is_pe-onx+1,ie_pe+onx-1
- fxa = int_drhodS(i,j,k,tau)
- aloc(i,j,k) =+0.5d0*grav/rho_0*( (int_drhodS(i+1,j,k,tau)-fxa)*flux_east(i ,j,k) &
- +(fxa-int_drhodS(i-1,j,k,tau))*flux_east(i-1,j,k) ) /(dxt(i)*cost(j)) &
- +0.5d0*grav/rho_0*( (int_drhodS(i,j+1,k,tau)-fxa)*flux_north(i,j ,k) &
- +(fxa-int_drhodS(i,j-1,k,tau))*flux_north(i,j-1,k) ) /(dyt(j)*cost(j))
+ fxb = int_drhodS(i,j,k,tau)
+ aloc(i,j,k) =+0.5d0*grav/rho_0*( (int_drhodS(i+1,j,k,tau)-fxb)*flux_east(i ,j,k) &
+ +(fxb-int_drhodS(i-1,j,k,tau))*flux_east(i-1,j,k) ) /(dxt(i)*cost(j)) &
+ +0.5d0*grav/rho_0*( (int_drhodS(i,j+1,k,tau)-fxb)*flux_north(i,j ,k) &
+ +(fxb-int_drhodS(i,j-1,k,tau))*flux_north(i,j-1,k) ) /(dyt(j)*cost(j))
enddo
enddo
end do
@@ -197,6 +199,7 @@ subroutine tempsalt_diffusion
if (enable_conserve_energy) then
! diagnose dissipation of dynamic enthalpy by hor. mixing of temperature
+ aloc = 0
do k=1,nz
do j=js_pe-onx+1,je_pe+onx-1
do i=is_pe-onx+1,ie_pe+onx-1
@@ -253,6 +256,7 @@ endif
if (enable_conserve_energy) then
! diagnose dissipation of dynamic enthalpy by hor. mixing of salinity
+ aloc = 0
do k=1,nz
do j=js_pe-onx+1,je_pe+onx-1
do i=is_pe-onx+1,ie_pe+onx-1
@@ -303,6 +307,7 @@ subroutine tempsalt_sources
if (enable_conserve_energy) then
! diagnose effect on dynamic enthalpy
+ aloc = 0
do k=1,nz
do j=js_pe-onx+1,je_pe+onx-1
do i=is_pe-onx+1,ie_pe+onx-1
diff --git a/for_src/main/friction.f90 b/for_src/main/friction.f90
index be0c02b..e0c944d 100644
--- a/for_src/main/friction.f90
+++ b/for_src/main/friction.f90
@@ -12,6 +12,8 @@ subroutine explicit_vert_friction
integer :: i,j,k
real*8 :: diss(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz),fxa
+ diss = 0
+
!---------------------------------------------------------------------------------
! vertical friction of zonal momentum
!---------------------------------------------------------------------------------
@@ -116,6 +118,8 @@ subroutine implicit_vert_friction
real*8 :: a_tri(nz),b_tri(nz),c_tri(nz),d_tri(nz),delta(nz),fxa
real*8 :: diss(is_pe-onx:ie_pe+onx,js_pe-onx:je_pe+onx,nz)
+ diss = 0
+
!---------------------------------------------------------------------------------
! implicit vertical friction of zonal momentum
!---------------------------------------------------------------------------------
@@ -695,6 +699,8 @@ subroutine biharmonic_friction
+(flux_north(i,j,:) - flux_north(i,j-1,:))/(cost(j)*dyt(j))
enddo
enddo
+ del2(is,:,:)=0.d0
+ del2(:,js,:)=0.d0
do j=js,je
do i=is,ie-1
diff --git a/for_src/main/thermodynamics.f90 b/for_src/main/thermodynamics.f90
index f5017ed..8a4d527 100644
--- a/for_src/main/thermodynamics.f90
+++ b/for_src/main/thermodynamics.f90
@@ -298,6 +298,7 @@ subroutine advect_temperature
else
call adv_flux_2nd(is_pe-onx,ie_pe+onx,js_pe-onx,je_pe+onx,nz,flux_east,flux_north,flux_top,temp(:,:,:,tau))
endif
+ dtemp(:,:,:,tau) = 0
do j=js_pe,je_pe
do i=is_pe,ie_pe
dtemp(i,j,:,tau)=maskT(i,j,:)* (-( flux_east(i,j,:)- flux_east(i-1,j,:))/(cost(j)*dxt(i)) &
@@ -325,6 +326,7 @@ subroutine advect_salinity
else
call adv_flux_2nd(is_pe-onx,ie_pe+onx,js_pe-onx,je_pe+onx,nz,flux_east,flux_north,flux_top,salt(:,:,:,tau))
endif
+ dsalt(:,:,:,tau) = 0
do j=js_pe,je_pe
do i=is_pe,ie_pe
dsalt(i,j,:,tau)=maskT(i,j,:)* (-( flux_east(i,j,:)- flux_east(i-1,j,:))/(cost(j)*dxt(i)) &
diff --git a/for_src/tke/tke.f90 b/for_src/tke/tke.f90
index 7226d76..6f888c3 100644
--- a/for_src/tke/tke.f90
+++ b/for_src/tke/tke.f90
@@ -193,7 +193,7 @@ subroutine integrate_tke
flux_east(i,j,:)=K_h_tke*(tke(i+1,j,:,tau)-tke(i,j,:,tau))/(cost(j)*dxu(i))*maskU(i,j,:)
enddo
enddo
- flux_east(ie_pe-onx,:,:)=0.d0
+ flux_east(ie_pe+onx,:,:)=0.d0
do j=js_pe-onx,je_pe+onx-1
flux_north(:,j,:)=K_h_tke*(tke(:,j+1,:,tau)-tke(:,j,:,tau))/dyu(j)*maskV(:,j,:)*cosu(j)
enddo
diff --git a/for_src/external/solve_pressure.f90 b/for_src/external/solve_pressure.f90
index d331ae8..07b62f8 100644
--- a/for_src/external/solve_pressure.f90
+++ b/for_src/external/solve_pressure.f90
@@ -59,7 +59,7 @@ subroutine solve_pressure
if (enable_free_surface) then
do j=js_pe,je_pe
do i=is_pe,ie_pe
- forc(i,j) = forc(i,j) - psi(i,j,tau)/(grav*dt_mom**2)*maskT(i,j,nz)
+ forc(i,j) = forc(i,j) - psi(i,j,tau)/(grav*dt_mom*dt_tracer)*maskT(i,j,nz)
enddo
enddo
endif
@@ -125,7 +125,7 @@ end subroutine solve_pressure
if (enable_free_surface) then
do j=js_pe,je_pe
do i=is_pe,ie_pe
- cf(i,j,0+2,0+2) = cf(i,j,0+2,0+2) - 1D0/(grav*dt_mom**2) *maskM(i,j)
+ cf(i,j,0+2,0+2) = cf(i,j,0+2,0+2) - 1D0/(grav*dt_mom*dt_tracer) *maskM(i,j)
end do
end do
endif
# fortran front end
F90 = mpif90
# NetCDF Fortran compile and link flags, queried from nf-config
CDFFLAGS = $(shell nf-config --fflags --flibs)
MPIFLAGS =
F90FLAGS = -llapack -lblas -fconvert=big-endian -O3 -Wall #-check bounds
# python front end
# flags passed to f2py when building the Python extension module
F2PYFLAGS = -llapack -lblas
F2PY_MPIFLAGS = -I/usr/lib/x86_64-linux-gnu/openmpi/include -L/usr/lib/x86_64-linux-gnu/openmpi/lib -lmpi_usempif08 -lmpi_usempi_ignore_tkr -lmpi_mpifh -lmpi
# fortran front end
F90 = mpif90
# NetCDF Fortran compile and link flags, queried from nf-config
CDFFLAGS = $(shell nf-config --fflags --flibs)
MPIFLAGS =
F90FLAGS = -llapack -lblas -fconvert=big-endian -O3 -Wall #-check bounds
# python front end
# f2py flags using the meson backend with declared dependencies
F2PYFLAGS = --backend meson --dep lapack
F2PY_MPIFLAGS = --dep mpi
\ No newline at end of file
"""Veros, the versatile ocean simulator"""
import sys
import types
# black magic: ensure lazy imports for public API by overriding module.__class__
def _reraise_exceptions(func):
import functools
@functools.wraps(func)
def reraise_wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
raise ImportError("Critical error during initial import") from e
return reraise_wrapper
class _PublicAPI(types.ModuleType):
    """Module class providing the lazily-imported public API.

    The veros module's ``__class__`` is replaced with this type, so each
    property below is only imported when first accessed; import failures are
    re-raised as ImportError via ``_reraise_exceptions``.
    """

    @property
    @_reraise_exceptions
    def __version__(self):
        from veros._version import get_versions

        return get_versions()["version"]

    @property
    @_reraise_exceptions
    def logger(self):
        # created on first access, then cached on the module object
        if not hasattr(self, "_logger"):
            from veros.logs import setup_logging

            self._logger = setup_logging()
        return self._logger

    @property
    @_reraise_exceptions
    def runtime_settings(self):
        # singleton, created on first access
        if not hasattr(self, "_runtime_settings"):
            from veros.runtime import RuntimeSettings

            self._runtime_settings = RuntimeSettings()
        return self._runtime_settings

    @property
    @_reraise_exceptions
    def runtime_state(self):
        # singleton, created on first access
        if not hasattr(self, "_runtime_state"):
            from veros.runtime import RuntimeState

            self._runtime_state = RuntimeState()
        return self._runtime_state

    @property
    @_reraise_exceptions
    def veros_routine(self):
        from veros.routines import veros_routine

        return veros_routine

    @property
    @_reraise_exceptions
    def veros_kernel(self):
        from veros.routines import veros_kernel

        return veros_kernel

    @property
    @_reraise_exceptions
    def KernelOutput(self):
        from veros.state import KernelOutput

        return KernelOutput

    @property
    @_reraise_exceptions
    def VerosSetup(self):
        from veros.veros import VerosSetup

        return VerosSetup

    @property
    @_reraise_exceptions
    def VerosState(self):
        from veros.state import VerosState

        return VerosState
# install the lazy API and scrub the helper names from the module namespace
sys.modules[__name__].__class__ = _PublicAPI
del sys
del types
del _PublicAPI
del _reraise_exceptions
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain.
# Generated by versioneer-0.28
# https://github.com/python-versioneer/python-versioneer
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
from typing import Callable, Dict
import functools
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    return {
        "refnames": git_refnames,
        "full": git_full,
        "date": git_date,
    }
class VersioneerConfig:
    """Container for Versioneer configuration parameters."""


def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these values are baked in when 'setup.py versioneer' writes _version.py
    config = VersioneerConfig()
    config.VCS = "git"
    config.style = "pep440"
    config.tag_prefix = "v"
    config.parentdir_prefix = "None"
    config.versionfile_source = "veros/_version.py"
    config.verbose = False
    return config
# Raised internally to signal "try the next version-discovery strategy".
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY: Dict[str, str] = {}
HANDLERS: Dict[str, Dict[str, Callable]] = {}


def register_vcs_handler(vcs, method):  # decorator
    """Create decorator to mark a method as the handler of a VCS."""

    def decorate(f):
        # store f in HANDLERS[vcs][method], creating the per-VCS dict on demand
        HANDLERS.setdefault(vcs, {})[method] = f
        return f

    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s).

    Tries each name in *commands* in turn (e.g. "git.cmd" then "git.exe" on
    Windows) and returns a ``(stdout, returncode)`` tuple; ``(None, None)``
    if no command could be launched at all.
    """
    assert isinstance(commands, list)
    process = None
    popen_kwargs = {}
    if sys.platform == "win32":
        # This hides the console window if pythonw.exe is used
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        popen_kwargs["startupinfo"] = startupinfo
    for command in commands:
        try:
            dispcmd = str([command] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen([command] + args, cwd=cwd, env=env,
                                       stdout=subprocess.PIPE,
                                       stderr=(subprocess.PIPE if hide_stderr
                                               else None), **popen_kwargs)
            break
        except OSError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # command not found -- try the next candidate
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        # for/else: loop exhausted without a successful Popen
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    tried = []
    current = root
    for _ in range(3):
        name = os.path.basename(current)
        if name.startswith(parentdir_prefix):
            return {
                "version": name[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        tried.append(current)
        current = os.path.dirname(current)  # up a level
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file."""
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except OSError:
        # a missing file simply yields an empty keywords dict
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    Returns a version dict derived from the git-archive-expanded keywords, or
    raises NotThisMethod if the keywords are missing or unexpanded.
    """
    if "refnames" not in keywords:
        raise NotThisMethod("Short version file found")
    date = keywords.get("date")
    if date is not None:
        # Use only the last line. Previous lines may contain GPG signature
        # information.
        date = date.splitlines()[-1]
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {r.strip() for r in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {r for r in refs if re.search(r'\d', r)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            # Filter out refs that exactly match prefix or that don't start
            # with a number once the prefix is stripped (mostly a concern
            # when prefix is '')
            if not re.match(r'\d', r):
                continue
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
    """Get version from 'git describe' in the root of the source tree.
    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys "long", "short", "branch",
    "closest-tag", "distance", "dirty", "date" and "error". On a parse
    failure "error" is set and the describe-derived keys may be absent.
    Raises NotThisMethod if git is unavailable or *root* is not a work tree.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    # GIT_DIR can interfere with correct operation of Versioneer.
    # It may be intended to be passed to the Versioneer-versioned project,
    # but that should not change where we get our version from.
    env = os.environ.copy()
    env.pop("GIT_DIR", None)
    runner = functools.partial(runner, env=env)
    # bail out early if root is not inside a git work tree at all
    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
                   hide_stderr=not verbose)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = runner(GITS, [
        "describe", "--tags", "--dirty", "--always", "--long",
        "--match", f"{tag_prefix}[[:digit:]]*"
    ], cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
                             cwd=root)
    # --abbrev-ref was added in git-1.6.3
    if rc != 0 or branch_name is None:
        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
    branch_name = branch_name.strip()

    if branch_name == "HEAD":
        # If we aren't exactly on a branch, pick a branch which represents
        # the current commit. If all else fails, we are on a branchless
        # commit.
        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
        # --contains was added in git-1.5.4
        if rc != 0 or branches is None:
            raise NotThisMethod("'git branch --contains' returned error")
        branches = branches.split("\n")

        # Remove the first line if we're running detached
        if "(" in branches[0]:
            branches.pop(0)

        # Strip off the leading "* " from the list of branches.
        branches = [branch[2:] for branch in branches]
        if "master" in branches:
            branch_name = "master"
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]

    pieces["branch"] = branch_name

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparsable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                              % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                              % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
        pieces["distance"] = len(out.split())  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    # NOTE(review): runner(...)[0] could be None if `git show` fails, which
    # would make .strip() raise — presumably git is known-good by this point.
    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a .

    Looks at pieces["closest-tag"] to decide whether a PEP 440 local
    version separator ("+") has already been used.
    """
    # `or ""` guards against "closest-tag" being present but None, which
    # would make the membership test raise TypeError.
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    dirty_suffix = ".dirty" if pieces["dirty"] else ""

    if not tag:
        # exception #1: no tag reachable from this commit
        return "0+untagged.%d.g%s%s" % (pieces["distance"], pieces["short"], dirty_suffix)

    if not (pieces["distance"] or pieces["dirty"]):
        # clean build, sitting exactly on the tag
        return tag

    # inlined plus_or_dot: "." if the tag already carries a local segment
    separator = "." if "+" in tag else "+"
    return "%s%s%d.g%s%s" % (tag, separator, pieces["distance"], pieces["short"], dirty_suffix)
def render_pep440_branch(pieces):
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch. Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    off_master = pieces["branch"] != "master"
    dirty_suffix = ".dirty" if pieces["dirty"] else ""

    if tag:
        if not (pieces["distance"] or pieces["dirty"]):
            # clean build exactly on the tag
            return tag
        parts = [tag]
        if off_master:
            parts.append(".dev0")
        # inlined plus_or_dot: avoid a second "+" in the local segment
        parts.append("." if "+" in tag else "+")
        parts.append("%d.g%s" % (pieces["distance"], pieces["short"]))
        parts.append(dirty_suffix)
        return "".join(parts)

    # exception #1: no tag at all
    parts = ["0"]
    if off_master:
        parts.append(".dev0")
    parts.append("+untagged.%d.g%s" % (pieces["distance"], pieces["short"]))
    parts.append(dirty_suffix)
    return "".join(parts)
def pep440_split_post(ver):
    """Split pep440 version string at the post-release segment.

    Returns a tuple ``(release, post)``: the release segments before the
    post-release, and the post-release version number as an int (0 for a
    bare ``.post``), or ``None`` if no post-release segment is present.
    (The previous docstring claimed -1 for the missing case; the code has
    always returned None, and callers test ``is not None``.)
    """
    vc = str.split(ver, ".post")
    return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
def render_pep440_pre(pieces):
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]

    if not tag:
        # exception #1: no tag at all
        return "0.post0.dev%d" % pieces["distance"]

    if not pieces["distance"]:
        # no commits since the tag: use the tag as-is
        return tag

    # inlined pep440_split_post: separate any existing .post segment
    segments = tag.split(".post")
    post = int(segments[1] or 0) if len(segments) == 2 else None

    if post is not None:
        # bump the existing post-release number and mark as dev
        return "%s.post%d.dev%d" % (segments[0], post + 1, pieces["distance"])
    return "%s.post0.dev%d" % (segments[0], pieces["distance"])
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]

    if tag:
        if not (pieces["distance"] or pieces["dirty"]):
            # clean build exactly on the tag
            return tag
        out = tag + ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            out += ".dev0"
        # inlined plus_or_dot, then the abbreviated commit hash
        out += ("." if "+" in tag else "+") + "g%s" % pieces["short"]
        return out

    # exception #1: no tag at all
    out = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        out += ".dev0"
    return out + "+g%s" % pieces["short"]
def render_pep440_post_branch(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    off_master = pieces["branch"] != "master"
    dirty_suffix = ".dirty" if pieces["dirty"] else ""

    if tag:
        if not (pieces["distance"] or pieces["dirty"]):
            # clean build exactly on the tag
            return tag
        out = tag + ".post%d" % pieces["distance"]
        if off_master:
            out += ".dev0"
        # inlined plus_or_dot, then hash and dirty marker
        out += "." if "+" in tag else "+"
        return out + "g%s" % pieces["short"] + dirty_suffix

    # exception #1: no tag at all
    out = "0.post%d" % pieces["distance"]
    if off_master:
        out += ".dev0"
    return out + "+g%s" % pieces["short"] + dirty_suffix
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]

    if tag:
        if not (pieces["distance"] or pieces["dirty"]):
            # clean build exactly on the tag
            return tag
        out = "%s.post%d" % (tag, pieces["distance"])
    else:
        # exception #1: no tag at all
        out = "0.post%d" % pieces["distance"]

    if pieces["dirty"]:
        out += ".dev0"
    return out
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]

    if tag:
        out = tag
        if pieces["distance"]:
            # only append distance/hash when we're past the tag
            out += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: bare short hash
        out = pieces["short"]

    return out + ("-dirty" if pieces["dirty"] else "")
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always --long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]

    if tag:
        # distance and hash are always included, even at distance 0
        out = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: bare short hash
        out = pieces["short"]

    return out + ("-dirty" if pieces["dirty"] else "")
def render(pieces, style):
    """Render the given version pieces into the requested style.

    Returns a dict with "version", "full-revisionid", "dirty", "error"
    and "date" keys. A pre-existing error in *pieces* short-circuits to
    an "unknown" version.
    """
    if pieces["error"]:
        return {
            "version": "unknown",
            "full-revisionid": pieces.get("long"),
            "dirty": None,
            "error": pieces["error"],
            "date": None,
        }

    if not style or style == "default":
        style = "pep440"  # the default

    # dispatch table instead of an if/elif chain
    renderers = {
        "pep440": render_pep440,
        "pep440-branch": render_pep440_branch,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-post-branch": render_pep440_post_branch,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)

    return {
        "version": renderers[style](pieces),
        "full-revisionid": pieces["long"],
        "dirty": pieces["dirty"],
        "error": None,
        "date": pieces.get("date"),
    }
def get_versions():
    """Get version information or return default if unable to do so.

    Tries, in order: (1) git-archive keyword substitution, (2) `git
    describe` on a checked-out work tree, (3) the parent directory name;
    falls back to "0+unknown" with an explanatory "error" entry.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    # method 1: keywords expanded by `git archive` (tarball installs)
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for _ in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        # no __file__ on this interpreter; nothing more we can do
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    # method 2: ask git directly (checked-out source tree)
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    # method 3: guess from the name of the unpacked source directory
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
import warnings

# Array-computation backends supported by Veros.
BACKENDS = ("numpy", "jax")

# Informational message shown when the corresponding backend is selected.
BACKEND_MESSAGES = {"jax": "Kernels are compiled during first iteration, be patient"}

# Backend names whose one-time initialization has already run.
_init_done = set()
def init_jax_config():
    """One-time JAX setup: precision, target device, and pytree registration.

    Safe to call repeatedly; all calls after the first are no-ops.
    """
    if "jax" in _init_done:
        return

    import jax

    from veros import runtime_settings, runtime_state
    from veros.state import (
        VerosState,
        VerosVariables,
        DistSafeVariableWrapper,
        veros_state_pytree_flatten,
        veros_state_pytree_unflatten,
        veros_variables_pytree_flatten,
        veros_variables_pytree_unflatten,
        dist_safe_wrapper_pytree_flatten,
        dist_safe_wrapper_pytree_unflatten,
    )

    if runtime_state.proc_num > 1:
        # distributed JAX runs require the MPI exchange backend
        try:
            import mpi4jax  # noqa: F401
        except ImportError as exc:
            raise RuntimeError("Running JAX with MPI requires mpi4jax to be installed") from exc

    if runtime_settings.float_type == "float64":
        # JAX defaults to 32-bit arithmetic; opt in to double precision
        jax.config.update("jax_enable_x64", True)
    else:
        # ignore warnings about unavailable x64 types
        warnings.filterwarnings("ignore", message="Explicitly requested dtype.*", module="jax")

    jax.config.update("jax_platform_name", runtime_settings.device)

    # register Veros state containers as pytrees so they can flow through
    # jax transformations (jit, grad, ...)
    jax.tree_util.register_pytree_node(VerosState, veros_state_pytree_flatten, veros_state_pytree_unflatten)
    jax.tree_util.register_pytree_node(VerosVariables, veros_variables_pytree_flatten, veros_variables_pytree_unflatten)
    jax.tree_util.register_pytree_node(
        DistSafeVariableWrapper, dist_safe_wrapper_pytree_flatten, dist_safe_wrapper_pytree_unflatten
    )

    _init_done.add("jax")
def get_backend_module(backend_name):
    """Return the numpy-compatible array module for *backend_name*.

    Parameters:
        backend_name: one of BACKENDS ("numpy" or "jax").

    Raises:
        ValueError: if *backend_name* is not a known backend.
    """
    if backend_name not in BACKENDS:
        # BACKENDS is a tuple; the previous message used BACKENDS.keys(),
        # which raised AttributeError instead of the intended ValueError
        raise ValueError(f"unrecognized backend {backend_name} (must be either of: {list(BACKENDS)!r})")

    if backend_name == "jax":
        # one-time JAX configuration (precision, device, pytrees)
        init_jax_config()
        import jax.numpy as backend_module
    elif backend_name == "numpy":
        import numpy as backend_module

    return backend_module
def get_curent_device_name():
    """Return the device Veros runs on ("cpu" unless the JAX backend is active)."""
    from veros import runtime_settings

    if runtime_settings.backend == "jax":
        return runtime_settings.device
    return "cpu"
#!/usr/bin/env python

# Fail early with a helpful message if the CLI dependency is missing.
try:
    import click

    have_click = True
except ImportError:
    have_click = False

if not have_click:
    raise ImportError("The Veros command line tools require click (e.g. through `pip install click`)")

# keep the module namespace clean
del click
del have_click

from veros.cli import veros, veros_run, veros_copy_setup, veros_create_mask, veros_resubmit  # noqa: E402

# register all subcommands on the main `veros` click group
veros.cli.add_command(veros_run.cli, "run")
veros.cli.add_command(veros_copy_setup.cli, "copy-setup")
veros.cli.add_command(veros_create_mask.cli, "create-mask")
veros.cli.add_command(veros_resubmit.cli, "resubmit")
import click
@click.group("veros")
@click.version_option()
def cli():
    """Veros command-line tools"""
    # (no body needed: the redundant `pass` after the docstring was removed;
    # subcommands are attached to this group elsewhere)
#!/usr/bin/env python
import os
import shutil
import datetime
import functools
import textwrap
import importlib
import click
import entrypoints
# Environment variable holding extra setup directories (";"-separated).
SETUPDIR_ENVVAR = "VEROS_SETUP_DIR"
# File patterns excluded when copying a setup directory.
IGNORE_PATTERNS = ["__init__.py", "*.pyc", "__pycache__"]
# Mapping {setup_name: path}, populated below.
SETUPS = {}

setup_dirs = []

# collect setup directories advertised by plugins via entry points
for e in entrypoints.get_group_all("veros.setup_dirs"):
    # - hack to prevent actually importing plugins -
    # we can only find the location of top-level modules
    # so assume that foo.bar.baz always resolves to foo/bar/baz
    base_module, *submodules = e.module_name.split(".")
    modpath = os.path.join(os.path.dirname(importlib.util.find_spec(base_module).origin), *submodules)
    setup_dirs.append(modpath)

# additional user-supplied setup directories from the environment
for setup_dir in os.environ.get(SETUPDIR_ENVVAR, "").split(";"):
    if os.path.isdir(setup_dir):
        setup_dirs.append(setup_dir)

# populate {setup_name: path} mapping
for setup_dir in setup_dirs:
    for setup in os.listdir(setup_dir):
        setup_path = os.path.join(setup_dir, setup)
        if not os.path.isdir(setup_path):
            continue
        # skip private/hidden entries
        if setup.startswith(("_", ".")):
            continue
        SETUPS[setup] = setup_path

SETUP_NAMES = sorted(SETUPS.keys())
def rewrite_main_file(target_file, setup_name):
    """Prepend an auto-generated provenance header to a copied setup file.

    The header records the Veros version and copy timestamp and prevents
    direct execution of the setup file. A shebang line, if present, is
    preserved as the first line.

    Parameters:
        target_file: path of the setup file to rewrite in place.
        setup_name: name of the setup (used in the generated header).
    """
    from veros import __version__ as veros_version

    # timezone-aware replacement for the deprecated datetime.utcnow()
    current_date = datetime.datetime.now(datetime.timezone.utc)

    # NOTE: the `raise` must be indented under the `if` here so the
    # generated setup file is valid Python
    header_str = textwrap.dedent(
        f'''
        """
        This Veros setup file was generated by

           $ veros copy-setup {setup_name}

        on {current_date:%Y-%m-%d %H:%M:%S} UTC.
        """

        __VEROS_VERSION__ = {veros_version!r}

        if __name__ == "__main__":
            raise RuntimeError(
                "Veros setups cannot be executed directly. "
                f"Try `veros run {{__file__}}` instead."
            )

        # -- end of auto-generated header, original file below --
        '''
    ).strip()

    with open(target_file, "r") as f:
        orig_contents = f.readlines()

    # keep the shebang (if any) first; guard against an empty source file
    shebang = None
    if orig_contents and orig_contents[0].startswith("#!"):
        shebang = orig_contents[0]
        orig_contents = orig_contents[1:]

    with open(target_file, "w") as f:
        if shebang is not None:
            # shebang keeps its trailing newline; the extra "\n" leaves a
            # blank line before the header
            f.write(shebang + "\n")
        f.write(header_str + "\n\n")
        f.writelines(orig_contents)
def copy_setup(setup, to=None):
    """Copy a standard setup to another directory.
    Available setups:
    {setups}
    Example:
    $ veros copy-setup global_4deg --to ~/veros-setups/4deg-lowfric
    Further directories containing setup templates can be added to this command
    via the {setup_envvar} environment variable.
    """
    # default target: a directory named after the setup, in the CWD
    if to is None:
        to = os.path.join(os.getcwd(), setup)

    if os.path.exists(to):
        raise RuntimeError("Target directory must not exist")

    # create the parent of the target if needed
    parent_dir = os.path.dirname(os.path.realpath(to))
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)

    shutil.copytree(
        SETUPS[setup],
        to,
        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS),
    )

    # stamp the copied main setup file with a provenance header
    rewrite_main_file(os.path.join(to, f"{setup}.py"), setup)
# interpolate the available setup names and env var into the CLI help text
copy_setup.__doc__ = copy_setup.__doc__.format(setups=", ".join(SETUP_NAMES), setup_envvar=SETUPDIR_ENVVAR)


@click.command("veros-copy-setup")
@click.argument("setup", type=click.Choice(SETUP_NAMES), metavar="SETUP")
@click.option(
    "--to",
    required=False,
    default=None,
    type=click.Path(dir_okay=False, file_okay=False, writable=True),
    help=("Target directory, must not exist " "(default: copy to current working directory)"),
)
@functools.wraps(copy_setup)
def cli(*args, **kwargs):
    # thin click wrapper; name/docstring come from copy_setup via wraps
    copy_setup(*args, **kwargs)
#!/usr/bin/env python
import functools
import click
def get_mask_data(depth):
    """Return a uint8 mask: 255 where *depth* is positive, 0 elsewhere."""
    import numpy as np

    land = np.asarray(depth) > 0
    return land.astype(np.uint8) * np.uint8(255)
def smooth_image(data, sigma):
    """Apply a Gaussian blur with standard deviation *sigma* to *data*."""
    import scipy.ndimage

    return scipy.ndimage.gaussian_filter(data, sigma=sigma)
def save_image(data, path):
    """Write *data* to *path* as a vertically flipped 1-bit image."""
    import numpy as np
    from PIL import Image

    # flip so that row 0 ends up at the bottom of the image
    flipped = np.flipud(data)
    image = Image.fromarray(flipped).convert("1")
    image.save(path)
def create_mask(infile, outfile, variable="z", scale=None):
    """Creates a mask image from a given netCDF file"""
    import numpy as np
    import h5netcdf

    # read the topography variable from the netCDF file
    with h5netcdf.File(infile, "r") as topo:
        topography = np.array(topo.variables[variable])

    # optional Gaussian smoothing before thresholding
    if scale is not None:
        topography = smooth_image(topography, scale)

    save_image(get_mask_data(topography), outfile)
@click.command("veros-create-mask")
@click.argument("infile", type=click.Path(exists=True, dir_okay=False))
@click.option("-v", "--variable", default="z", help="Variable holding topography data (default: z)")
@click.option("-o", "--outfile", default="topography.png", help="Output filename (default: topography.png)")
@click.option(
    "-s",
    "--scale",
    nargs=2,
    type=click.INT,
    default=None,
    help="Standard deviation in grid cells for Gaussian smoother (default: disable smoother)",
)
@functools.wraps(create_mask)
def cli(*args, **kwargs):
    # thin click wrapper; click passes all options as keyword arguments
    create_mask(**kwargs)
#!/usr/bin/env python
import functools
import subprocess
import shlex
import pipes
import sys
import os
import time
import click
# Template for the state file tracking how many runs have completed.
LAST_N_FILENAME = "{identifier}.current_run"
# Seconds to watch a freshly spawned callback process for early crashes.
CHILD_TIMEOUT = 10
# Polling interval (seconds) while watching the callback process.
POLL_DELAY = 0.1
class ShellCommand(click.ParamType):
    """Click parameter type parsing a quoted shell command into an argv list."""

    name = "command"

    def convert(self, value, param, ctx):
        # shlex.split honors shell-style quoting, e.g. "a 'b c'" -> ["a", "b c"]
        return shlex.split(value)
def get_current_n(filename):
    """Read the number of completed runs from *filename* (0 if absent)."""
    if os.path.isfile(filename):
        with open(filename, "r") as state_file:
            return int(state_file.read())
    return 0
def write_next_n(n, filename):
    """Persist the next run number *n* to *filename* (overwrites)."""
    with open(filename, "w") as state_file:
        state_file.write(str(n))
def unparse(args):
    """Join *args* into a single shell-safe command string."""
    # pipes.quote is deprecated and removed in Python 3.13; shlex.quote
    # (shlex is already imported at module level) is the supported equivalent
    return " ".join(shlex.quote(arg) for arg in args)
def call_veros(cmd, name, n, runlen):
    """Execute one Veros run segment via the given command line.

    Builds the per-run identifier ``{name}.{n:0>4}``, wires restart
    output/input settings, and runs *cmd* through the shell.

    Raises:
        RuntimeError: if the spawned command exits with a non-zero status.
    """
    identifier = f"{name}.{n:0>4}"
    prev_id = f"{name}.{n - 1:0>4}"
    args = [
        "-s",
        "identifier",
        identifier,
        "-s",
        "restart_output_filename",
        # NOTE(review): deliberately NOT an f-string — presumably Veros
        # substitutes {identifier} itself at runtime; confirm before changing
        "{identifier}.restart.h5",
        "-s",
        "runlen",
        str(runlen),
    ]
    if n:
        # every run after the first resumes from the previous run's restart file
        args += ["-s", "restart_input_filename", f"{prev_id}.restart.h5"]
    # make sure this isn't buffered
    sys.stdout.write(f'\n >>> {" ".join(cmd + args)}\n\n')
    sys.stdout.flush()
    try:
        subprocess.check_call(unparse(cmd + args), shell=True)
    except subprocess.CalledProcessError:
        raise RuntimeError(f"Run {n} failed, exiting")
def resubmit(identifier, num_runs, length_per_run, veros_cmd, callback):
    """Performs several runs of Veros back to back, using the previous run as restart input.
    Intended to be used with scheduling systems (e.g. SLURM or PBS).
    """
    # state file that persists the number of completed runs across invocations
    last_n_filename = LAST_N_FILENAME.format(identifier=identifier)

    current_n = get_current_n(last_n_filename)
    if current_n >= num_runs:
        # all requested runs already completed
        return

    call_veros(veros_cmd, identifier, current_n, length_per_run)

    next_n = current_n + 1
    write_next_n(next_n, last_n_filename)

    if next_n >= num_runs:
        return

    # spawn the follow-up invocation (by default this same command line)
    next_proc = subprocess.Popen(unparse(callback), shell=True)

    # catch immediately crashing processes
    timeout = CHILD_TIMEOUT

    # poll the child for up to CHILD_TIMEOUT seconds; a nonzero early exit
    # is treated as a crash, otherwise we assume it started successfully
    while timeout > 0:
        retcode = next_proc.poll()
        if retcode is not None:
            if retcode > 0:
                # process crashed
                raise RuntimeError(f"Callback exited with {retcode}")
            else:
                break
        time.sleep(POLL_DELAY)
        timeout -= POLL_DELAY
@click.command("veros-resubmit", short_help="Re-run a Veros setup several times")
@click.option("-i", "--identifier", required=True, help="Base identifier of the simulation")
@click.option("-n", "--num-runs", type=click.INT, required=True, help="Total number of runs to execute")
@click.option("-l", "--length-per-run", type=click.FLOAT, required=True, help="Length (in seconds) of each run")
@click.option(
    "-c", "--veros-cmd", type=ShellCommand(), required=True, help="The command that is used to call veros (quoted)"
)
@click.option(
    "--callback",
    metavar="CMD",
    type=ShellCommand(),
    default=None,
    help="Command to call after each run has finished (quoted, default: call self)",
)
@functools.wraps(resubmit)
def cli(*args, **kwargs):
    # default callback: re-invoke this very command line (self-resubmission)
    if kwargs["callback"] is None:
        kwargs["callback"] = sys.argv
    resubmit(*args, **kwargs)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment