Merged
1,100 changes: 584 additions & 516 deletions examples/adcp_example.ipynb

Large diffs are not rendered by default.

178 changes: 99 additions & 79 deletions examples/adv_example.ipynb

Large diffs are not rendered by default.

Binary file modified examples/data/dolfyn/test_data/Sig1000_tidal_bin.nc
Binary file not shown.
Binary file modified examples/data/dolfyn/test_data/dat_vm.mat
Binary file not shown.
Binary file modified examples/data/dolfyn/test_data/vmdas01_wh.nc
Binary file not shown.
103 changes: 33 additions & 70 deletions mhkit/dolfyn/adp/turbulence.py
@@ -128,7 +128,7 @@ def __init__(
self.diff_style = diff_style
self.orientation = orientation

def _diff_func(self, vel, u):
def _diff_func(self, vel, u, orientation):
"""Applies the chosen style of numerical differentiation to velocity data.

This method calculates the derivative of the velocity data 'vel' with respect to the 'range'
@@ -149,15 +149,23 @@ def _diff_func(self, vel, u):
The calculated derivative of the velocity data.
"""

if not orientation:
orientation = self.orientation
sign = 1
if orientation == "down":
sign *= -1

if self.diff_style == "first":
out = _diffz_first(vel[u].values, vel["range"].values)
return out, vel.range[1:]
return sign * out, vel.range[1:]

elif self.diff_style == "centered":
out = _diffz_centered(vel[u].values, vel["range"].values)
return out, vel.range[1:-1]
return sign * out, vel.range[1:-1]

elif self.diff_style == "centered_extended":
out = _diffz_centered_extended(vel[u].values, vel["range"].values)
return out, vel.range
return sign * out, vel.range

def dudz(self, vel, orientation=None):
"""
@@ -182,28 +190,24 @@ def dudz(self, vel, orientation=None):
'true vertical' direction.
"""

if not orientation:
orientation = self.orientation
sign = 1
if orientation == "down":
sign *= -1

dudz, rng = sign * self._diff_func(vel, 0)
dudz, rng = self._diff_func(vel, 0, orientation)
return xr.DataArray(
dudz,
coords=[rng, vel.time],
dims=["range", "time"],
attrs={"units": "s-1", "long_name": "Shear in X-direction"},
)
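For context, a minimal usage sketch of the reworked shear methods (not part of the diff). It assumes ADPBinner is importable from mhkit.dolfyn.adp.turbulence, that its constructor accepts n_bin, fs, diff_style, and orientation as set in __init__ above, and that ds is an ADCP dataset holding a 'vel' variable; names and values are illustrative.

    from mhkit.dolfyn.adp.turbulence import ADPBinner

    # Illustrative bin size and sampling frequency
    binner = ADPBinner(n_bin=ds.fs * 300, fs=ds.fs, diff_style="centered", orientation="down")

    # With the sign handling moved into _diff_func, each shear method takes an
    # optional orientation override; omitting it falls back to binner.orientation.
    dudz = binner.dudz(ds["vel"])                     # uses the binner's "down" orientation
    dvdz = binner.dvdz(ds["vel"], orientation="up")   # explicit override flips the sign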

def dvdz(self, vel):
def dvdz(self, vel, orientation=None):
"""
The shear in the second velocity component.

Parameters
----------
vel : xarray.DataArray
ADCP raw velocity
orientation : str, default=ADPBinner.orientation
Direction ADCP is facing ('up' or 'down')

Returns
-------
@@ -217,22 +221,24 @@ def dvdz(self, vel):
'true vertical' direction.
"""

dvdz, rng = self._diff_func(vel, 1)
dvdz, rng = self._diff_func(vel, 1, orientation)
return xr.DataArray(
dvdz,
coords=[rng, vel.time],
dims=["range", "time"],
attrs={"units": "s-1", "long_name": "Shear in Y-direction"},
)

def dwdz(self, vel):
def dwdz(self, vel, orientation=None):
"""
The shear in the third velocity component.

Parameters
----------
vel : xarray.DataArray
ADCP raw velocity
orientation : str, default=ADPBinner.orientation
Direction ADCP is facing ('up' or 'down')

Returns
-------
@@ -246,7 +252,7 @@ def dwdz(self, vel):
'true vertical' direction.
"""

dwdz, rng = self._diff_func(vel, 2)
dwdz, rng = self._diff_func(vel, 2, orientation)
return xr.DataArray(
dwdz,
coords=[rng, vel.time],
@@ -537,7 +543,7 @@ def _beam_variance(self, ds, time, noise, beam_order, n_beams):
)

# Calculate along-beam velocity prime squared bar
bp2_ = np.empty((n_beams, len(ds.range), len(time))) * np.nan
bp2_ = np.empty((n_beams, len(ds["range"]), len(time))) * np.nan
for i, beam in enumerate(beam_order):
bp2_[i] = np.nanvar(self.reshape(beam_vel[beam]), axis=-1)

@@ -602,7 +608,7 @@ def reynolds_stress_4beam(self, ds, noise=None, orientation=None, beam_angle=Non
np.stack([upwp_ * np.nan, upwp_, vpwp_]).astype("float32"),
coords={
"tau": ["upvp_", "upwp_", "vpwp_"],
"range": ds.range,
"range": ds["range"],
"time": time,
},
attrs={"units": "m2 s-2", "long_name": "Specific Reynolds Stress Vector"},
@@ -646,7 +652,7 @@ def stress_tensor_5beam(
in pitch and roll. u'v'_ cannot be directly calculated by a 5-beam ADCP,
so it is approximated by the covariance of `u` and `v`. The uncertainty
introduced by using this approximation is small if deviations from pitch
and roll are small (< 10 degrees).
and roll are small (<= 5 degrees).

Dewey, R., and S. Stringer. "Reynolds stresses and turbulent kinetic
energy estimates from various ADCP beam configurations: Theory." J. of
@@ -663,7 +669,7 @@ def stress_tensor_5beam(

# Run through warnings
b_angle, noise = self._stress_func_warnings(
ds, beam_angle, noise, tilt_thresh=10
ds, beam_angle, noise, tilt_thresh=5
)

# Fetch beam order
@@ -713,7 +719,7 @@ def stress_tensor_5beam(
np.stack([upup_, vpvp_, wpwp_]).astype("float32"),
coords={
"tke": ["upup_", "vpvp_", "wpwp_"],
"range": ds.range,
"range": ds["range"],
"time": time,
},
attrs={
@@ -752,7 +758,7 @@ def stress_tensor_5beam(
np.stack([upvp_, upwp_, vpwp_]).astype("float32"),
coords={
"tau": ["upvp_", "upwp_", "vpwp_"],
"range": ds.range,
"range": ds["range"],
"time": time,
},
attrs={
@@ -763,49 +769,6 @@ def stress_tensor_5beam(

return tke_vec, stress_vec

def total_turbulent_kinetic_energy(
self, ds, noise=None, orientation=None, beam_angle=None
):
"""
Calculate magnitude of turbulent kinetic energy from 5-beam ADCP.

Parameters
----------
ds : xarray.Dataset
Raw dataset in beam coordinates
noise : int or xarray.DataArray, default=0 (time)
Doppler noise level in units of m/s
orientation : str, default=ds.attrs['orientation']
Direction ADCP is facing ('up' or 'down')
beam_angle : int, default=ds.attrs['beam_angle']
ADCP beam angle in units of degrees

Returns
-------
tke : xarray.DataArray
Turbulent kinetic energy magnitude

Notes
-----
This function is a wrapper around 'calc_stress_5beam' that then
combines the TKE components.

Warning: the integral length scale of turbulence captured by the
ADCP measurements (i.e. the size of turbulent structures) increases
with increasing range from the instrument.
"""

tke_vec = self.stress_tensor_5beam(
ds, noise, orientation, beam_angle, tke_only=True
)

tke = tke_vec.sum("tke") / 2
tke.attrs["units"] = "m2 s-2"
tke.attrs["long_name"] = ("TKE Magnitude",)
tke.attrs["standard_name"] = "specific_turbulent_kinetic_energy_of_sea_water"

return tke.astype("float32")
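Since the total_turbulent_kinetic_energy wrapper above is removed, a short sketch of reproducing its result directly from stress_tensor_5beam, assuming binner is an ADPBinner, ds is a beam-coordinate dataset, and the default call returns the (tke_vec, stress_vec) pair shown earlier in this diff:

    tke_vec, stress_vec = binner.stress_tensor_5beam(ds, noise=0)
    tke = (tke_vec.sum("tke") / 2).astype("float32")
    tke.attrs["units"] = "m2 s-2"
    tke.attrs["long_name"] = "TKE Magnitude"  # plain string (the removed code stored a tuple here)
    tke.attrs["standard_name"] = "specific_turbulent_kinetic_energy_of_sea_water"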

def check_turbulence_cascade_slope(self, psd, freq_range=[0.2, 0.4]):
"""
This function calculates the slope of the PSD, the power spectra
@@ -1027,11 +990,11 @@ def dissipation_rate_SF(self, vel_raw, r_range=[1, 5]):
)

if "range_b5" in vel_raw.dims:
rng = vel_raw.range_b5
time = self.mean(vel_raw.time_b5.values)
rng = vel_raw["range_b5"]
time = self.mean(vel_raw["time_b5"].values)
else:
rng = vel_raw.range
time = self.mean(vel_raw.time.values)
rng = vel_raw["range"]
time = self.mean(vel_raw["time"].values)

# bm shape is [range, ensemble time, 'data within ensemble']
bm = self.demean(vel_raw.values) # take out the ensemble mean
@@ -1139,7 +1102,7 @@ def friction_velocity(self, ds_avg, upwp_, z_inds=slice(1, 5), H=None):
raise TypeError("`z_inds` must be an instance of `slice(int,int)`.")

if not H:
H = ds_avg.depth.values
H = ds_avg["depth"].values
z = ds_avg["range"].values
upwp_ = upwp_.values

@@ -1151,6 +1114,6 @@ def friction_velocity(self, ds_avg, upwp_, z_inds=slice(1, 5), H=None):

return xr.DataArray(
u_star.astype("float32"),
coords={"time": ds_avg.time},
coords={"time": ds_avg["time"]},
attrs={"units": "m s-1", "long_name": "Friction Velocity"},
)
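A hedged usage sketch for friction_velocity (not part of this PR): ds_avg is assumed to be the ensemble-averaged dataset containing a 'depth' variable, and stress_vec to come from one of the stress methods above; the tau selection and index slice are illustrative.

    upwp_ = stress_vec.sel(tau="upwp_")
    u_star = binner.friction_velocity(ds_avg, upwp_, z_inds=slice(1, 5))  # H defaults to ds_avg["depth"]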
4 changes: 2 additions & 2 deletions mhkit/dolfyn/adv/motion.py
@@ -2,7 +2,7 @@
import xarray as xr
import warnings
import scipy.signal as ss
from scipy.integrate import cumtrapz
from scipy.integrate import cumulative_trapezoid

from ..rotate import vector as rot
from ..rotate.api import _make_model, rotate2
@@ -188,7 +188,7 @@ def calc_velacc(
dat = np.concatenate(
(
np.zeros(list(hp.shape[:-1]) + [1]),
cumtrapz(hp, dx=1 / samp_freq, axis=-1),
cumulative_trapezoid(hp, dx=1 / samp_freq, axis=-1),
),
axis=-1,
)
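As a standalone illustration of the pattern above (not from the PR): scipy.integrate.cumulative_trapezoid, which replaces the removed cumtrapz, returns one fewer sample along the integration axis, so a leading zero is prepended to keep the integrated velocity the same length as the high-passed acceleration.

    import numpy as np
    from scipy.integrate import cumulative_trapezoid

    samp_freq = 16.0              # Hz, illustrative
    hp = np.random.randn(3, 128)  # stand-in for high-passed acceleration, shape (3, time)
    vel = np.concatenate(
        (
            np.zeros(hp.shape[:-1] + (1,)),                       # pad so output matches input length
            cumulative_trapezoid(hp, dx=1 / samp_freq, axis=-1),  # integrate acceleration to velocity
        ),
        axis=-1,
    )
    assert vel.shape == hp.shape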
20 changes: 10 additions & 10 deletions mhkit/dolfyn/adv/turbulence.py
@@ -54,7 +54,7 @@ def __call__(self, ds, freq_units="rad/s", window="hann"):
def reynolds_stress(self, veldat, detrend=True):
"""
Calculate the specific Reynolds stresses
(cross-covariances of u,v,w in m^2/s^2)
(covariances of u,v,w in m^2/s^2)

Parameters
----------
@@ -76,7 +76,7 @@ def reynolds_stress(self, veldat, detrend=True):
if not isinstance(veldat, xr.DataArray):
raise TypeError("`veldat` must be an instance of `xarray.DataArray`.")

time = self.mean(veldat.time.values)
time = self.mean(veldat["time"].values)
vel = veldat.values

out = np.empty(self._outshape(vel[:3].shape)[:-1], dtype=np.float32)
@@ -144,7 +144,7 @@ def cross_spectral_density(

fs_in = self._parse_fs(fs)
n_fft = self._parse_nfft_coh(n_fft_coh)
time = self.mean(veldat.time.values)
time = self.mean(veldat["time"].values)
veldat = veldat.values
if len(np.shape(veldat)) != 2:
raise Exception(
@@ -391,7 +391,7 @@ def dissipation_rate_LT83(self, psd, U_mag, freq_range=[6.28, 12.57], noise=Non
raise TypeError("`psd` must be an instance of `xarray.DataArray`.")
if len(U_mag.shape) != 1:
raise Exception("U_mag should be 1-dimensional (time)")
if len(psd.time) != len(U_mag.time):
if len(psd["time"]) != len(U_mag["time"]):
raise Exception("`U_mag` should be from ensembled-averaged dataset")
if not hasattr(freq_range, "__iter__") or len(freq_range) != 2:
raise ValueError("`freq_range` must be an iterable of length 2.")
@@ -459,7 +459,7 @@ def dissipation_rate_SF(self, vel_raw, U_mag, fs=None, freq_range=[2.0, 4.0]):

if not isinstance(vel_raw, xr.DataArray):
raise TypeError("`vel_raw` must be an instance of `xarray.DataArray`.")
if len(vel_raw.time) == len(U_mag.time):
if len(vel_raw["time"]) == len(U_mag["time"]):
raise Exception("`U_mag` should be from ensembled-averaged dataset")
if not hasattr(freq_range, "__iter__") or len(freq_range) != 2:
raise ValueError("`freq_range` must be an iterable of length 2.")
@@ -586,8 +586,8 @@ def dissipation_rate_TE01(self, dat_raw, dat_avg, freq_range=[6.28, 12.57]):

# Index data to be used
inds = (freq_range[0] < freq) & (freq < freq_range[1])
psd = dat_avg.psd[..., inds].values
freq = freq[inds].reshape([1] * (dat_avg.psd.ndim - 2) + [sum(inds)])
psd = dat_avg["psd"][..., inds].values
freq = freq[inds].reshape([1] * (dat_avg["psd"].ndim - 2) + [sum(inds)])

# Estimate values
# u & v components (equation 6)
@@ -606,7 +606,7 @@ def dissipation_rate_TE01(self, dat_raw, dat_avg, freq_range=[6.28, 12.57]):

return xr.DataArray(
out.astype("float32"),
coords={"time": dat_avg.psd.time},
coords={"time": dat_avg["psd"]["time"]},
dims="time",
attrs={
"units": "m2 s-3",
@@ -645,7 +645,7 @@ def integral_length_scales(self, a_cov, U_mag, fs=None):

if not isinstance(a_cov, xr.DataArray):
raise TypeError("`a_cov` must be an instance of `xarray.DataArray`.")
if len(a_cov.time) != len(U_mag.time):
if len(a_cov["time"]) != len(U_mag["time"]):
raise Exception("`U_mag` should be from ensembled-averaged dataset")

acov = a_cov.values
@@ -656,7 +656,7 @@ def integral_length_scales(self, a_cov, U_mag, fs=None):

return xr.DataArray(
L_int.astype("float32"),
coords={"dir": a_cov.dir, "time": a_cov.time},
coords={"dir": a_cov["dir"], "time": a_cov["time"]},
attrs={
"units": "m",
"long_name": "Integral Length Scale",
5 changes: 4 additions & 1 deletion mhkit/dolfyn/rotate/api.py
@@ -252,7 +252,10 @@ def set_declination(ds, declin, inplace=True):
Rdec,
)
if "heading" in ds:
ds["heading"] += angle
heading = ds["heading"] + angle
heading[heading > 180] -= 360
ds["heading"].values = heading

if rotate2earth:
rotate2(ds, "earth", inplace=True)
if "principal_heading" in ds.attrs:
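To illustrate the wrap added above (standalone sketch, values made up): adding the declination can push a heading past 180 degrees, so values above 180 are folded back down by 360 degrees.

    import numpy as np

    heading = np.array([170.0, 179.0, 185.0])  # degrees, after adding the declination
    heading[heading > 180] -= 360
    # heading is now [170., 179., -175.]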