Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -58,9 +58,9 @@ jobs:
- name: Download and unzip required data files
run: |
mkdir -p "$HOME/auv_data"
# Dorado389.tar.gz contains original log files of short dorado mission for testing: 2003.339.04
# Dorado389.tar.gz contains original log files of short dorado mission for testing: 2011.256.02
wget -q --no-check-certificate -O "$HOME/auv_data/Dorado389.tar.gz" https://stoqs.mbari.org/auv_data/Dorado389.tar.gz
tar -xf "$HOME/auv_data/Dorado389.tar.gz" -C "$HOME/auv_data" 2> /dev/null
tar -xvf "$HOME/auv_data/Dorado389.tar.gz" -C "$HOME/auv_data" 2> /dev/null
# i2MAP.tar.gz contains original log files of i2MAP mission for testing: 2018.34.01
wget -q --no-check-certificate -O "$HOME/auv_data/i2map.tar.gz" https://stoqs.mbari.org/auv_data/i2map.tar.gz
tar -xf "$HOME/auv_data/i2map.tar.gz" -C "$HOME/auv_data" 2> /dev/null
Expand Down
32 changes: 3 additions & 29 deletions src/data/calibrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@
import pandas as pd
import pyproj
from AUV import monotonic_increasing_time_indices
from hs2_proc import hs2_calc_bb, hs2_read_cal_file
from hs2_proc import compute_backscatter, hs2_calc_bb, hs2_read_cal_file
from logs2netcdfs import BASE_PATH, MISSIONLOGS, MISSIONNETCDFS, TIME, TIME60HZ, AUV_NetCDF
from matplotlib import patches
from scipy import signal
Expand Down Expand Up @@ -594,32 +594,6 @@ def _beam_transmittance_from_volts(combined_nc, nc) -> tuple[float, float]:
return Tr, c


def _compute_backscatter(wavelength_nm: float, salinity: float, volScat: float): # noqa: N803
    """Convert a scaled volume scattering signal to backscattering coefficients.

    Returns a ``(bbw, bbp)`` tuple: the pure-seawater backscattering
    coefficient and the particulate backscattering coefficient.
    ``volScat`` may be a scalar or a NumPy array (the arithmetic broadcasts) —
    the caller passes an array of scaled ecopuck counts.
    """
    # Cribbed from https://mbari.slack.com/archives/C04ETLY6T7V/p1710457297254969?thread_ts=1710348431.316509&cid=C04ETLY6T7V
    # This is the same computation used for LRAUV ecopucks. Used here for Dorado ecopuck
    # following the conversion to "scaled" output using scale_factor and dark counts.
    theta = 117.0 / 57.29578  # sensor scattering angle converted to radians
    d = 0.09  # depolarization ratio

    # These calculations are from the Triplet Puck User's Guide, Revision H
    # Bw: pure-seawater volume scattering at the sensor angle
    Bw = (
        1.38
        * (wavelength_nm / 500.0) ** (-4.32)
        * (1.0 + 0.3 * salinity / 37.0)
        * 1e-4
        * (1.0 + np.cos(theta) ** 2.0 * (1.0 - d) / (1.0 + d))
    )
    # Particulate contribution: measured minus pure-seawater scattering
    Bp = volScat - Bw
    # Pure-water scattering coefficient; coefficient/exponent pair differs
    # below vs. at-or-above salinity 35
    if salinity < 35.0: # noqa: PLR2004
        bw = 0.0022533 * (wavelength_nm / 500.0) ** (-4.23) * 1e-4
    else:
        bw = 0.0029308 * (wavelength_nm / 500.0) ** (-4.24) * 1e-4
    bbw = bw / 2.0
    # 2*pi*chi with chi = 1.1 integrates volume scattering to backscattering
    bbp = 2.0 * np.pi * 1.1 * Bp

    return bbw, bbp


class SensorInfo:
    # Empty placeholder class — presumably used elsewhere in the file as a
    # simple dynamic attribute container; usage is not visible in this excerpt.
    pass

Expand Down Expand Up @@ -3002,7 +2976,7 @@ def _ecopuck_process(self, sensor, cf):
source = self.sinfo[sensor]["data_filename"]
coord_str = f"{sensor}_time {sensor}_depth {sensor}_latitude {sensor}_longitude"
beta_700 = cf.bbp700_scale_factor * (orig_nc["BB_Sig"].to_numpy() - cf.bbp700_dark_counts)
_, bbp = _compute_backscatter(700, 35.2, beta_700) # Use an average salinity of 35.2
_, bbp = compute_backscatter(700, 35.2, beta_700) # Use an average salinity of 35.2

self.combined_nc["ecopuck_bbp700"] = xr.DataArray(
bbp,
Expand All @@ -3017,7 +2991,7 @@ def _ecopuck_process(self, sensor, cf):
"comment": (
f"BB_Sig from {source} converted to beta_700 using scale factor "
f"{cf.bbp700_scale_factor} and dark counts {cf.bbp700_dark_counts}, "
"then converted to bbp700 by the _compute_backscatter() function."
"then converted to bbp700 by the compute_backscatter() function."
),
}

Expand Down
3 changes: 2 additions & 1 deletion src/data/create_products.py
Original file line number Diff line number Diff line change
Expand Up @@ -385,7 +385,8 @@ def gulper_odv(self, sec_bnds: int = 1) -> str: # noqa: C901, PLR0912, PLR0915

gulper = Gulper()
gulper.args = argparse.Namespace()
gulper.args.auv_name = "dorado"
gulper.args.base_path = self.args.base_path
gulper.args.auv_name = self.args.auv_name
gulper.args.mission = self.args.mission
gulper.args.local = self.args.local
gulper.args.verbose = self.args.verbose
Expand Down
20 changes: 9 additions & 11 deletions src/data/gulper.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

import requests
import xarray as xr
from logs2netcdfs import TIMEOUT
from logs2netcdfs import MISSIONLOGS, MISSIONNETCDFS, TIMEOUT


class Gulper:
Expand All @@ -33,19 +33,18 @@ def mission_start_esecs(self) -> float:

# Get the first time record from mission's navigation.nc file
if self.args.local:
base_path = Path(__file__).parent.joinpath("../../data/auv_data").resolve()
url = Path(
base_path,
"dorado",
"missionnetcdfs",
self.args.base_path,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
"navigation.nc",
)
else:
# Relies on auv-python having processed the mission
url = os.path.join( # noqa: PTH118
"http://dods.mbari.org/opendap/data/auvctd/",
"missionnetcdfs",
MISSIONNETCDFS,
self.args.mission.split(".")[0],
self.args.mission.split(".")[0] + self.args.mission.split(".")[1],
self.args.mission,
Expand All @@ -64,11 +63,10 @@ def parse_gulpers(self, sec_delay: int = 1) -> dict: # noqa: C901, PLR0912, PLR
bottles = {}
if self.args.local:
# Read from local file - useful for testing in auv-python
base_path = Path(__file__).parent.joinpath("../../data/auv_data").resolve()
mission_dir = Path(
base_path,
"dorado",
"missionlogs",
self.args.base_path,
self.args.auv_name,
MISSIONLOGS,
self.args.mission,
)
syslog_file = Path(mission_dir, "syslog")
Expand All @@ -81,7 +79,7 @@ def parse_gulpers(self, sec_delay: int = 1) -> dict: # noqa: C901, PLR0912, PLR
else:
syslog_url = os.path.join( # noqa: PTH118
"http://dods.mbari.org/data/auvctd/",
"missionlogs",
MISSIONLOGS,
self.args.mission.split(".")[0],
self.args.mission.split(".")[0] + self.args.mission.split(".")[1],
self.args.mission,
Expand Down
58 changes: 31 additions & 27 deletions src/data/hs2_proc.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# noqa: INP001

from collections import defaultdict
from math import exp, pi
from math import exp
from pathlib import Path

import numpy as np
Expand Down Expand Up @@ -98,6 +98,32 @@ def _int_signer(ints_in):
return np.array(signed_ints)


def compute_backscatter(wavelength_nm: float, salinity: float, volScat: float):  # noqa: N803
    """Convert a scaled volume scattering signal to backscattering coefficients.

    Same computation as used for LRAUV ecopucks; applied to Dorado ecopuck
    data after the raw counts are scaled with scale_factor and dark counts.
    Formulas are from the Triplet Puck User's Guide, Revision H.
    Cribbed from https://mbari.slack.com/archives/C04ETLY6T7V/p1710457297254969?thread_ts=1710348431.316509&cid=C04ETLY6T7V

    Returns a ``(bbw, bbp)`` tuple: pure-seawater and particulate
    backscattering coefficients.  ``volScat`` may be a scalar or a NumPy
    array (the arithmetic broadcasts).
    """
    scattering_angle = 117.0 / 57.29578  # sensor scattering angle, radians
    depol = 0.09  # depolarization ratio

    # Pure-seawater volume scattering at the sensor angle
    angular_term = 1.0 + np.cos(scattering_angle) ** 2.0 * (1.0 - depol) / (1.0 + depol)
    beta_water = (
        1.38
        * (wavelength_nm / 500.0) ** (-4.32)
        * (1.0 + 0.3 * salinity / 37.0)
        * 1e-4
        * angular_term
    )

    # Particulate contribution: measured minus pure-seawater scattering
    beta_particulate = volScat - beta_water

    # Pure-water scattering coefficient; coefficient/exponent pair differs
    # below vs. at-or-above salinity 35
    fresh = salinity < 35.0  # noqa: PLR2004
    coefficient = 0.0022533 if fresh else 0.0029308
    exponent = -4.23 if fresh else -4.24
    bw = coefficient * (wavelength_nm / 500.0) ** exponent * 1e-4

    bbw = bw / 2.0
    # 2*pi*chi with chi = 1.1 integrates volume scattering to backscattering
    bbp = 2.0 * np.pi * 1.1 * beta_particulate

    return bbw, bbp


def hs2_calc_bb(orig_nc, cals):
# Some original comments from hs2_calc_bb.m
# % Date Created: June 21, 2007
Expand Down Expand Up @@ -141,34 +167,12 @@ def hs2_calc_bb(orig_nc, cals):
# Replaces "RawTempValue" as the name, helpful when looking at things in the debugger
beta_uncorr.name = f"beta_uncorr_Ch{chan}"
wavelength = int(cals[f"Ch{chan}"]["Name"][2:])
beta_w, b_bw = purewater_scatter(wavelength)

chi = 1.08
b_b_uncorr = ((2 * pi * chi) * (beta_uncorr - beta_w)) + b_bw

globals()[f"bb{wavelength}_uncorr"] = b_b_uncorr
globals()[f"bbp{wavelength}_uncorr"] = b_b_uncorr - b_bw

# ESTIMATION OF KBB AND SIGMA FUNCTION
a = typ_absorption(wavelength)
b_b_tilde = 0.015
b = (b_b_uncorr - b_bw) / b_b_tilde

K_bb = a + 0.4 * b
k_1 = 1.0
k_exp = float(cals[f"Ch{chan}"]["SigmaExp"])
sigma = k_1 * np.exp(k_exp * K_bb)

b_b_corr = sigma * b_b_uncorr
# Need to test subtracting b_bw here instead of after multiplying by sigma
b_bp_corr = sigma * (b_b_uncorr - b_bw)

setattr(hs2, f"bb{wavelength}", b_b_corr)
# Legacy code that subtracts b_bw after multiplying by sigma
setattr(hs2, f"bbp{wavelength}", b_b_corr - b_bw)
# This is likely the correct way to do it, with b_bw subtracted before multiplying by sigma
setattr(hs2, f"bbp{wavelength}_fixed", b_bp_corr)
# Use compute_backscatter - same as used for ecopucks - to calculate bbp
_, bbp = compute_backscatter(wavelength, 35.2, beta_uncorr)
setattr(hs2, f"bbp{wavelength}", bbp)

# Fluorescence
# -% 'hs2.fl700_uncorr = (hs2.Snorm3.*50)./((1 + str2num(CAL.Ch(3).TempCoeff).*(hs2.Temp-str2num(CAL.General.CalTemp))).*hs2.Gain3.*str2num(CAL.Ch(3).RNominal));' # noqa: E501
denom = (
(
Expand Down
1 change: 1 addition & 0 deletions src/data/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,6 +355,7 @@ def archive(self, mission: str, add_logger_handlers: bool = True) -> None: # no
def create_products(self, mission: str) -> None:
cp = CreateProducts()
cp.args = argparse.Namespace()
cp.args.base_path = self.args.base_path
cp.args.auv_name = self.vehicle
cp.args.mission = mission
cp.args.local = self.args.local
Expand Down
130 changes: 0 additions & 130 deletions src/data/test_hs2_proc.py

This file was deleted.

16 changes: 15 additions & 1 deletion src/data/test_process_dorado.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,18 @@ def test_process_dorado(complete_dorado_processing):
assert nc_file.exists() # noqa: S101
assert time() - nc_file.stat().st_mtime < MAX_SECS # noqa: S101
assert nc_file.stat().st_size > 0 # noqa: S101
assert nc_file.stat().st_size == 621235 # noqa: PLR2004, S101
# Testing that the file size matches a specific value is crude,
# but it will alert us if a code change unexpectedly changes the file size.
# If code changes are expected to change the file size then we should
# update the expected size here.
EXPECTED_SIZE = 621235
EXPECTED_SIZE_LOCAL = 621298
if str(proc.args.base_path).startswith("/home/runner"):
# The size is different in GitHub Actions, maybe due to different metadata
assert nc_file.stat().st_size == EXPECTED_SIZE # noqa: S101
else:
# The size is different locally, maybe due to different metadata
# It's likely that the size will be different on different machines
# as these kind of metadata items are added to nc_file:
# NC_GLOBAL.history: Created by /Users/mccann/GitHub/auv-python/src/data/process_dorado.py ... # noqa: E501
assert nc_file.stat().st_size == EXPECTED_SIZE_LOCAL # noqa: S101
Loading