Binary file added examples/data/dolfyn/Sig1000_dp_echo.ad2cp
Binary file not shown.
Binary file added examples/data/dolfyn/Sig1000_online.ad2cp
Binary file not shown.
Binary file added examples/data/dolfyn/Sig100_avg.ad2cp
Binary file not shown.
Binary file added examples/data/dolfyn/Sig100_raw_avg.ad2cp
Binary file not shown.
Binary file added examples/data/dolfyn/test_data/Sig1000_online.nc
Binary file not shown.
Binary file added examples/data/dolfyn/test_data/Sig100_avg.nc
Binary file not shown.
Binary file added examples/data/dolfyn/test_data/Sig100_raw_avg.nc
Binary file not shown.
Binary file added examples/data/dolfyn/test_data/Sig500_dp_ice1.nc
Binary file not shown.
8 changes: 8 additions & 0 deletions mhkit/dolfyn/io/base.py
@@ -164,6 +164,14 @@ def _create_dataset(data):
"""

tag = ["_avg", "_b5", "_echo", "_bt", "_gps", "_altraw", "_altraw_avg", "_sl"]
# If burst velocity not measured
if "vel" not in data["data_vars"]:
# dual profile where burst velocity is not measured but echo sounder is
if "vel_avg" in data["data_vars"]:
data["coords"]["time"] = data["coords"]["time_avg"]
else:
t_vars = [t for t in data["coords"] if "time" in t]
data["coords"]["time"] = data["coords"][t_vars[0]]

ds_dict = {}
for key in data["coords"]:
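In isolation, the new fallback behaves like this sketch (a toy dict standing in for dolfyn's internal data structure; keys and values are hypothetical):

# Toy stand-in for dolfyn's internal data dict: an averaging-only file
# with no burst velocity, so "time" is borrowed from "time_avg".
data = {
    "data_vars": {"vel_avg": [[0.1, 0.2]]},
    "coords": {"time_avg": [0.0, 60.0]},
}
if "vel" not in data["data_vars"]:
    if "vel_avg" in data["data_vars"]:
        data["coords"]["time"] = data["coords"]["time_avg"]
    else:
        # Fall back on the first time coordinate found
        t_vars = [t for t in data["coords"] if "time" in t]
        data["coords"]["time"] = data["coords"][t_vars[0]]
print(data["coords"]["time"])  # [0.0, 60.0]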
239 changes: 108 additions & 131 deletions mhkit/dolfyn/io/nortek2.py
@@ -1,9 +1,9 @@
import numpy as np
from struct import unpack, calcsize
import json
import logging
import warnings
from struct import unpack, calcsize
from pathlib import Path
import logging
import json
import numpy as np

from . import nortek2_defs as defs
from . import nortek2_lib as lib
@@ -14,17 +14,14 @@
from ..time import epoch2dt64, _fill_time_gaps


int32_max = np.iinfo(np.int32).max


def read_signature(
filename,
userdata=True,
nens=None,
rebuild_index=False,
debug=False,
dual_profile=False,
**kwargs
**kwargs,
):
"""
Read a Nortek Signature (.ad2cp) datafile
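For orientation, a minimal usage sketch (the dual-profile file named here is one of the binaries added above; per split_dp_datasets below, a second dataset is returned only when an averaged profile is present):

from mhkit.dolfyn.io.nortek2 import read_signature

# Dual-profile Signature file added in this PR; with dual_profile=True
# the reader splits the burst and averaged profiles into two datasets.
ds_burst, ds_avg = read_signature(
    "examples/data/dolfyn/Sig1000_dp_echo.ad2cp", dual_profile=True
)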
@@ -143,30 +140,22 @@ def read_signature(


class _Ad2cpReader:
def __init__(
self,
fname,
endian=None,
bufsize=None,
rebuild_index=False,
debug=False,
dual_profile=False,
):
def __init__(self, fname, rebuild_index, debug, dual_profile):
self.fname = fname
self.debug = debug
self._check_nortek(endian)
self.f.seek(0, 2) # Seek to end
self._eof = self.f.tell()
self.start_pos = self._check_header()
# Open file, check endianness, and find file length
self._check_nortek2()
# Generate indexing file
self._index, self._dp = lib.get_index(
fname,
pos=self.start_pos,
pos=0,
eof=self._eof,
rebuild=rebuild_index,
debug=debug,
dp=dual_profile,
)
self._open(bufsize)
# Open file for reading
self._open()
self.filehead_config = self._read_filehead_config_string()
self._ens_pos = self._index["pos"][
lib._boolarray_firstensemble_ping(self._index)
@@ -176,52 +165,18 @@ def __init__(
self._init_burst_readers()
self.unknown_ID_count = {}

def _calc_lastblock_iswhole(
self,
):
blocksize, blocksize_count = np.unique(
np.diff(self._ens_pos), return_counts=True
)
standard_blocksize = blocksize[blocksize_count.argmax()]
return (self._eof - self._ens_pos[-1]) == standard_blocksize

def _check_nortek(self, endian):
def _check_nortek2(self):
self._open(10)
byts = self.f.read(2)
if endian is None:
if unpack("<" + "BB", byts) == (165, 10):
endian = "<"
elif unpack(">" + "BB", byts) == (165, 10):
endian = ">"
else:
raise Exception(
"I/O error: could not determine the 'endianness' "
"of the file. Are you sure this is a Nortek "
"AD2CP file?"
)
self.endian = endian

def _check_header(self):
def find_all(s, c):
idx = s.find(c)
while idx != -1:
yield idx
idx = s.find(c, idx + 1)

# Open the entire file to find start header
if self._eof >= int32_max:
init_buffer = int32_max
else:
init_buffer = self._eof
self._open(init_buffer)
pk = self.f.peek(1)
# Search for multiple saved headers
found = [i for i in find_all(pk, b"GETCLOCKSTR")]
if len(found) < 2:
return 0
else:
start_idx = found[-1] - 11
return start_idx
if not (unpack("<BB", byts) == (165, 10)):
raise Exception(
"I/O error: could not determine the 'endianness' "
"of the file. Are you sure this is a Nortek "
"AD2CP file?"
)
self.f.seek(0, 2) # Seek to end
self._eof = self.f.tell()
self.f.close()
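The simplified check reduces to a sync-word test, as in this standalone sketch (helper name is hypothetical, not part of the reader's API):

from struct import unpack

def _looks_like_ad2cp(path):
    # Every AD2CP file starts with the sync byte 0xA5 (165) followed by
    # the header size 0x0A (10).
    with open(path, "rb") as f:
        first_two = f.read(2)
    return len(first_two) == 2 and unpack("<BB", first_two) == (165, 10)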

def _open(self, bufsize=None):
if bufsize is None:
@@ -269,6 +224,13 @@ def _read_filehead_config_string(
out2[ky] = out[ky]
return out2

def _calc_lastblock_iswhole(self):
blocksize, blocksize_count = np.unique(
np.diff(self._ens_pos), return_counts=True
)
standard_blocksize = blocksize[blocksize_count.argmax()]
return (self._eof - self._ens_pos[-1]) == standard_blocksize
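A worked example of the block-size logic with made-up ensemble byte offsets:

import numpy as np

ens_pos = np.array([0, 4096, 8192])  # hypothetical ensemble positions
eof = 12288                          # hypothetical file length in bytes
blocksize, counts = np.unique(np.diff(ens_pos), return_counts=True)
standard = blocksize[counts.argmax()]   # most common spacing: 4096
print((eof - ens_pos[-1]) == standard)  # True -> last block is whole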

def _init_burst_readers(
self,
):
@@ -322,6 +284,9 @@ def n_id(id):

def _read_hdr(self, do_cs=False):
res = defs.header.read2dict(self.f, cs=do_cs)
if res["hsz"] == 12:
self.f.seek(-10, 1)
res = defs.header12.read2dict(self.f, cs=do_cs)
if res["sync"] != 165:
raise Exception("Out of sync!")
return res
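A standalone sketch of that fallback using plain struct (field layouts taken from the header definitions in nortek2_defs below; helper name is hypothetical):

from struct import Struct

HDR10 = Struct("<BBBBHHH")  # sync, hsz, id, fam, sz (u16), cs, hcs
HDR12 = Struct("<BBBBIHH")  # sync, hsz, id, fam, sz (u32), cs, hcs

def read_record_header(f):
    # Read the usual 10 bytes, then check the header-size byte (hsz);
    # a 12-byte header carries a uint32 record size instead of uint16.
    raw = f.read(10)
    if raw[1] == 12:
        raw += f.read(2)
        return HDR12.unpack(raw)
    return HDR10.unpack(raw)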
@@ -764,11 +729,25 @@ def _reduce(data):
dc["range_avg"] = (np.arange(dv["vel_avg"].shape[1]) + 1) * da[
"cell_size_avg"
] + da["blank_dist_avg"]
if "orientmat" not in dv:
# Check to see if orientmat_avg was written (only for instruments with an AHRS)
if ("orientmat" not in dv) and ("orientmat_avg" in dv):
dv["orientmat"] = dv.pop("orientmat_avg")
elif "heading" not in dv: # need these to write orientation matrix
dv["heading"] = dv.pop("heading_avg")
dv["pitch"] = dv.pop("pitch_avg")
dv["roll"] = dv.pop("roll_avg")
tmat = da["filehead_config"]["XFAVG"]
da["fs"] = da["filehead_config"]["PLAN"]["MIAVG"]
da["avg_interval_sec"] = da["filehead_config"]["AVG"]["AI"]
da["duty_cycle_interval"] = dci = da["filehead_config"]["PLAN"]["MIAVG"]
da["duty_cycle_n_burst"] = da["filehead_config"]["AVG"]["NPING"]
da["duty_cycle_avg_interval"] = dca = da["filehead_config"]["AVG"]["AI"]
fs = da["duty_cycle_n_burst"] / da["duty_cycle_avg_interval"]
da["duty_cycle_description"] = (
f"Measurements collected for {dca / 60} minutes at {fs} Hz every {dci / 60} minutes"
)
if "fs" in da:
da["fs_avg"] = fs
else:
da["fs"] = fs
da["bandwidth"] = da["filehead_config"]["AVG"]["BW"]
if "vel_b5" in dv:
# vel_b5 is sometimes shape 2 and sometimes shape 3
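To make the new duty-cycle attributes above concrete, a worked example with hypothetical filehead values (NPING = 120 pings averaged over AI = 60 s, repeated every MIAVG = 600 s):

n_burst = 120         # AVG NPING: pings per averaging interval
avg_interval = 60     # AVG AI: averaging interval (s)
cycle_interval = 600  # PLAN MIAVG: time between cycles (s)
fs = n_burst / avg_interval  # 2.0 Hz
print(
    f"Measurements collected for {avg_interval / 60} minutes "
    f"at {fs} Hz every {cycle_interval / 60} minutes"
)
# Measurements collected for 1.0 minutes at 2.0 Hz every 10.0 minutes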
@@ -799,71 +778,69 @@ def _reduce(data):
tm[irow, icol] = tmat["M" + str(irow + 1) + str(icol + 1)]
dv["beam2inst_orientmat"] = tm

# If burst velocity isn't used, need to copy one for 'time'
if "time" not in dc:
for val in dc:
if "time" in val:
time = val
dc["time"] = dc[time]


def split_dp_datasets(ds):
"""
Splits a dataset containing dual profiles into individual profiles
"""

# Figure out which variables belong to which profile based on length of time variables
t_dict = {}
for t in ds.coords:
if "time" in t:
t_dict[t] = ds[t].size

other_coords = []
for key, val in t_dict.items():
if val != t_dict["time"]:
if key.endswith("altraw"):
# altraw goes with burst, altraw_avg goes with avg
continue
other_coords.append(key)
# Fetch variables, coordinates, and attrs for second profiling configuration
other_vars = [
v for v in ds.data_vars if any(x in ds[v].coords for x in other_coords)
]
other_tags = [s.split("_")[-1] for s in other_coords]
other_coords += [v for v in ds.coords if any(x in v for x in other_tags)]
other_attrs = [s for s in ds.attrs if any(x in s for x in other_tags)]
critical_attrs = [
"inst_model",
"inst_make",
"inst_type",
"fs",
"orientation",
"orient_status",
"has_imu",
"beam_angle",
]

# Create second dataset
ds2 = type(ds)()
for a in other_attrs + critical_attrs:
ds2.attrs[a] = ds.attrs[a]
for v in other_vars:
ds2[v] = ds[v]
# Set rotate_vars
rotate_vars2 = [v for v in ds.attrs["rotate_vars"] if v in other_vars]
ds2.attrs["rotate_vars"] = rotate_vars2
# Set orientation matrices
ds2["beam2inst_orientmat"] = ds["beam2inst_orientmat"]
ds2 = ds2.rename({"orientmat_" + other_tags[0]: "orientmat"})
# Set original coordinate system
cy = ds2.attrs["coord_sys_axes_" + other_tags[0]]
ds2.attrs["coord_sys"] = {"XYZ": "inst", "ENU": "earth", "beam": "beam"}[cy]
ds2 = _set_coords(ds2, ref_frame=ds2.coord_sys)

# Clean up first dataset
[ds.attrs.pop(ky) for ky in other_attrs]
ds = ds.drop_vars(other_vars + other_coords)
for itm in rotate_vars2:
ds.attrs["rotate_vars"].remove(itm)

return ds, ds2
# Get averaged time variables
other_coords = [t for t in ds.coords if "_avg" in t]
# Check if ice tracking was used
if ("vel" in ds.data_vars) and ("time_bt" in ds.coords):
if ds["time"].size != ds["time_bt"].size:
other_coords.append("time_bt")

# Initialize the second dataset (stays empty if there is no second profile)
ds2 = type(ds)()
if other_coords:
# Fetch variables, coordinates, and attrs for second profiling configuration
other_vars = [
v
for v in ds.data_vars
if any(x in ds[v].coords for x in other_coords)
or any(ds[v].shape[-1] == ds[x].size for x in other_coords)
]
other_attrs = [s for s in ds.attrs if ("_avg" in s) or ("duty_cycle" in s)]
critical_attrs = [
"inst_model",
"inst_make",
"inst_type",
"fs",
"orientation",
"orient_status",
"has_imu",
"beam_angle",
]

# Create second dataset
for a in other_attrs + critical_attrs:
ds2.attrs[a] = ds.attrs[a]
for v in other_vars:
ds2[v] = ds[v]
# Set rotate_vars
rotate_vars2 = [v for v in ds.attrs["rotate_vars"] if v in other_vars]
ds2.attrs["rotate_vars"] = rotate_vars2
# Set orientation matrices
ds2["beam2inst_orientmat"] = ds["beam2inst_orientmat"]
if ds.attrs["has_imu"]:
ds2 = ds2.rename({"orientmat_avg": "orientmat"})
else:
ds2["orientmat"] = ds["orientmat"]
# Set original coordinate system
cy = ds2.attrs["coord_sys_axes_avg"]
ds2.attrs["coord_sys"] = {"XYZ": "inst", "ENU": "earth", "beam": "beam"}[cy]
# Need to add beam coordinate
if "beam" not in ds2.dims:
ds2 = ds2.assign_coords({"beam": ds["beam"], "dir": ds["dir"]})
ds2 = _set_coords(ds2, ref_frame=ds2.coord_sys)

# Clean up first dataset
[ds.attrs.pop(ky) for ky in other_attrs]
ds = ds.drop_vars(other_vars + other_coords)
for itm in rotate_vars2:
ds.attrs["rotate_vars"].remove(itm)

if ds2:
return ds, ds2
else:
return ds
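A usage sketch of the reworked splitter (ds is assumed to come from the reader above): the burst dataset keeps time, while the averaged dataset carries the _avg coordinates and duty-cycle attributes:

# With no "_avg" coordinates the function returns a single dataset,
# so callers should handle both shapes of the return value.
out = split_dp_datasets(ds)
if isinstance(out, tuple):
    ds_burst, ds_avg = out  # dual profile: burst + averaged
else:
    ds_burst = out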
17 changes: 16 additions & 1 deletion mhkit/dolfyn/io/nortek2_defs.py
@@ -1,6 +1,7 @@
import numpy as np
from copy import copy
from struct import Struct
import numpy as np

from . import nortek2_lib as lib


@@ -165,6 +166,7 @@ def __call__(self, array):
return array


# 10 byte header
header = _DataDef(
[
("sync", "B", [], None),
@@ -177,6 +179,19 @@ def __call__(self, array):
]
)

# 12 byte header
header12 = _DataDef(
[
("sync", "B", [], None),
("hsz", "B", [], None),
("id", "B", [], None),
("fam", "B", [], None),
("sz", "I", [], None),
("cs", "H", [], None),
("hcs", "H", [], None),
]
)
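As a quick consistency check on the two layouts ('B' = uint8, 'H' = uint16, 'I' = uint32; format strings inferred from the field lists above):

from struct import calcsize

# The only difference is the record-size field: uint16 ("H") in the
# 10-byte header vs uint32 ("I") in the 12-byte one.
assert calcsize("<BBBBHHH") == 10  # header
assert calcsize("<BBBBIHH") == 12  # header12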

_burst_hdr = [
("ver", "B", [], None),
("DatOffset", "B", [], None),