Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 11 additions & 4 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@
//"args": ["--auv_name", "dorado", "--mission", "2017.044.00", "--noinput", "-v"]
//"args": ["--auv_name", "i2map", "--mission", "2019.157.02", "--noinput", "-v", "1"]
//"args": ["--auv_name", "dorado", "--mission", "2010.265.00", "--noinput", "-v"]
"args": ["--auv_name", "dorado", "--mission", "2023.324.00", "--noinput", "-v", "--vehicle_dir", "/Volumes/AUVCTD/missionlogs"]
//"args": ["--auv_name", "dorado", "--mission", "2023.324.00", "--noinput", "-v", "--vehicle_dir", "/Volumes/AUVCTD/missionlogs"]
// Mission suffering from GPS Rollover bug. Add 1024 * 7 * 24 * 3600 = 619315200 seconds
"args": ["--auv_name", "dorado", "--mission", "2025.316.02", "--noinput", "-v", "--vehicle_dir", "/Volumes/AUVCTD/missionlogs", "--add_seconds", "619315200" ]
},
{
"name": "1.1 - lopcToNetCDF",
Expand Down Expand Up @@ -60,6 +62,8 @@
//"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--plot_time", "/latitude_time"]
// brizo 20250916T230652 has several ESP Samples from stoqs_lrauv_sep2025
"args": ["-v", "2", "--log_file", "brizo/missionlogs/2025/20250916_20250922/20250916T230652/202509162306_202509180305.nc4", "--plot_time", "/longitude_time"]
//"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109_cleaned_by_quinn.nc4", "--plot_time", "/longitude_time"]
//"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109_cleaned_by_quinn_latlon.nc4", "--plot_time", "/longitude_time"]
},
{
"name": "2.0 - calibrate.py",
Expand Down Expand Up @@ -137,7 +141,8 @@
//"args": ["-v", "1", "--auv_name", "dorado", "--mission", "2004.236.00"],
//"args": ["-v", "1", "--auv_name", "dorado", "--mission", "2008.289.03"],
//"args": ["-v", "1", "--auv_name", "dorado", "--mission", "2023.192.01"],
"args": ["-v", "1", "--auv_name", "dorado", "--mission", "2024.317.01"],
//"args": ["-v", "1", "--auv_name", "dorado", "--mission", "2024.317.01"],
"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250916_20250922/20250916T230652/202509162306_202509180305.nc4"]
},
{
"name": "3.1 - align.py for LRAUV --log_file",
Expand All @@ -146,7 +151,8 @@
"program": "${workspaceFolder}/src/data/align.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4"],
//"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4"],
"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250916_20250922/20250916T230652/202509162306_202509180305.nc4"]
},
{
"name": "4.0 - resample.py",
Expand Down Expand Up @@ -306,7 +312,8 @@
//"args": ["-v", "1", "--noinput", "--no_cleanup", "--mission", "2008.010.10"]
//"args": ["-v", "2", "--mission", "2004.029.03", "--noinput", "--no_cleanup"],
//"args": ["-v", "1", "--mission", "2023.192.01", "--noinput", "--no_cleanup"],
"args": ["-v", "1", "--mission", "2010.151.04", "--noinput", "--no_cleanup", "--clobber"],
//"args": ["-v", "1", "--mission", "2010.151.04", "--noinput", "--no_cleanup", "--clobber"],
"args": ["-v", "1", "--mission", "2025.316.02", "--noinput", "--no_cleanup", "--add_seconds", "619315200"],

},
{
Expand Down
4 changes: 3 additions & 1 deletion src/data/align.py
Original file line number Diff line number Diff line change
Expand Up @@ -447,7 +447,9 @@ def process_combined(self, log_file: str) -> None: # noqa: C901, PLR0912, PLR09
if variable in nav_coords.values():
continue

# Extract group name from variable (e.g., "ctd_seabird_salinity" -> "ctd_seabird")
# Extract group name from variable following convention for LRAUV data
# enforced in combine.py where first underscore separates group name
# from the rest of the variable name
var_parts = variable.split("_")
if len(var_parts) < 2: # noqa: PLR2004
self.logger.debug("Skipping variable with unexpected name format: %s", variable)
Expand Down
2 changes: 2 additions & 0 deletions src/data/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ def complete_dorado_processing():
ns.no_cleanup = True
ns.skip_download_process = False
ns.num_cores = 1
ns.add_seconds = None
ns.verbose = 1
proc.args = ns
proc.process_missions(TEST_START_YEAR)
Expand Down Expand Up @@ -147,6 +148,7 @@ def complete_i2map_processing():
ns.skip_download_process = False
ns.last_n_days = 0
ns.num_cores = 1
ns.add_seconds = None
ns.verbose = 1
proc.args = ns
proc.process_missions(TEST_START_YEAR)
Expand Down
40 changes: 19 additions & 21 deletions src/data/create_products.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,28 +88,26 @@ class CreateProducts:
}

def _open_ds(self):
if self.args.local:
local_nc = Path(
BASE_PATH,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
f"{self.args.auv_name}_{self.args.mission}_{FREQ}.nc",
)
local_nc = Path(
BASE_PATH,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
f"{self.args.auv_name}_{self.args.mission}_{FREQ}.nc",
)
# Requires mission to have been processed and archived to AUVCTD
dap_url = os.path.join( # noqa: PTH118
AUVCTD_OPENDAP_BASE,
"surveys",
self.args.mission.split(".")[0],
"netcdf",
f"{self.args.auv_name}_{self.args.mission}_{FREQ}.nc",
)
try:
self.ds = xr.open_dataset(dap_url)
except OSError:
self.logger.debug("%s not available yet", dap_url)
self.ds = xr.open_dataset(local_nc)
else:
# Requires mission to have been processed and archived to AUVCTD
dap_url = os.path.join( # noqa: PTH118
AUVCTD_OPENDAP_BASE,
"surveys",
self.args.mission.split(".")[0],
"netcdf",
f"{self.args.auv_name}_{self.args.mission}_{FREQ}.nc",
)
try:
self.ds = xr.open_dataset(dap_url)
except OSError as err:
self.logger.error("Error opening %s: %s", dap_url, err) # noqa: TRY400

def _grid_dims(self) -> tuple:
# From Matlab code in plot_sections.m:
Expand Down
48 changes: 27 additions & 21 deletions src/data/gulper.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,27 +32,33 @@ def mission_start_esecs(self) -> float:
return self.args.start_esecs

# Get the first time record from mission's navigation.nc file
if self.args.local:
url = Path(
self.args.base_path,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
"navigation.nc",
)
else:
# Relies on auv-python having processed the mission
url = os.path.join( # noqa: PTH118
"http://dods.mbari.org/opendap/data/auvctd/",
MISSIONNETCDFS,
self.args.mission.split(".")[0],
self.args.mission.split(".")[0] + self.args.mission.split(".")[1],
self.args.mission,
"navigation.nc",
)
self.logger.info("Reading mission start time from %s", url)
ds = xr.open_dataset(url)
return ds.time[0].to_numpy().astype("float64") / 1e9
file_path = Path(
self.args.base_path,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
"navigation.nc",
)
# Relies on auv-python having processed the mission
url = os.path.join( # noqa: PTH118
"http://dods.mbari.org/opendap/data/auvctd/",
MISSIONNETCDFS,
self.args.mission.split(".")[0],
self.args.mission.split(".")[0] + self.args.mission.split(".")[1],
self.args.mission,
"navigation.nc",
)
try:
self.logger.info("Reading mission start time from url = %s", url)
ds = xr.open_dataset(url)
except OSError:
self.logger.info("%s not available yet", url)
self.logger.info("Reading mission start time from file_path = %s", file_path)
ds = xr.open_dataset(file_path)

epoch_seconds = float(ds.time[0].astype("datetime64[s]").astype("int64"))
self.logger.info("Mission start time = %s epoch seconds", epoch_seconds)
return epoch_seconds

def parse_gulpers(self, sec_delay: int = 1) -> dict: # noqa: C901, PLR0912, PLR0915
"""Parse the Gulper times and bottle numbers from the auvctd syslog file.
Expand Down
44 changes: 41 additions & 3 deletions src/data/logs2netcdfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -554,8 +554,36 @@ def _create_variable( # noqa: PLR0913
self.logger.error("data seriously does not match shape") # noqa: TRY400
raise

def correct_times(self, log_data, add_seconds: int = 0):
    """Return a copy of log_data with add_seconds added to each timeTag variable.

    Variables whose data_type is not "timeTag" are passed through unchanged;
    timeTag variables are rebuilt as new log_record instances so the input
    list's records are never mutated in place.
    """
    corrected = []
    for var in log_data:
        if var.data_type != "timeTag":
            # Not a time variable: carry the original record over untouched.
            corrected.append(var)
            continue
        self.logger.info(
            "Adding to time values: %d seconds",
            add_seconds,
        )
        # Shift every time value, then build a fresh record around the
        # shifted data so the original log_record is left intact.
        shifted_times = [time_value + add_seconds for time_value in var.data]
        corrected.append(
            log_record(
                var.data_type,
                var.short_name,
                var.long_name,
                var.units,
                var.instrument_name,
                shifted_times,
            )
        )
    return corrected

def write_variables(self, log_data, netcdf_filename):
log_data = self._correct_dup_short_names(log_data)
if self.args.mission == "2025.316.02" and self.args.add_seconds:
# So far only this mission is known to suffer from GPS Week Rollover bug
log_data = self.correct_times(log_data, self.args.add_seconds)
self.nc_file.createDimension(TIME, len(log_data[0].data))
for variable in log_data:
self.logger.debug(
Expand Down Expand Up @@ -705,6 +733,10 @@ def _process_log_file(self, log_filename, netcdf_filename, src_dir=None):
self.nc_file.summary = SUMMARY_SOURCE.format(src_dir)
if hasattr(self.args, "summary") and self.args.summary:
self.nc_file.summary = self.args.summary
if self.args.add_seconds:
self.nc_file.summary += (
f". Corrected timeTag variables by adding {self.args.add_seconds} seconds. "
)
monotonic = monotonic_increasing_time_indices(self.nc_file["time"][:])
if (~monotonic).any():
self.logger.info(
Expand Down Expand Up @@ -953,6 +985,14 @@ def process_command_line(self):
action="store",
help="Directory for the vehicle's mission logs, e.g.: /Volumes/AUVCTD/missionlogs",
)
parser.add_argument(
# To use for mission 2025.316.02 which suffered from the GPS week rollover bug:
# 1024 * 7 * 24 * 3600 = 619315200 seconds to add to timeTag variables in the log_data
"--add_seconds",
type=int,
default=0,
help="Seconds to add to timeTag in log data",
)
parser.add_argument(
"-v",
"--verbose",
Expand Down Expand Up @@ -988,10 +1028,8 @@ def process_command_line(self):
else:
raise argparse.ArgumentError(
None,
"Must provide --src_dir with --auv_name & --mission",
"Must provide --vehicle_dir with --auv_name & --mission",
)

auv_netcdf.download_process_logs(src_dir=Path())
elif auv_netcdf.args.start and auv_netcdf.args.end:
auv_netcdf._deployments_between()
else:
Expand Down
12 changes: 11 additions & 1 deletion src/data/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,7 @@ def download_process(self, mission: str, src_dir: str) -> None:
auv_netcdf.args.auv_name = self.vehicle
auv_netcdf.args.mission = mission
auv_netcdf.args.use_portal = self.args.use_portal
auv_netcdf.args.add_seconds = self.args.add_seconds
auv_netcdf.set_portal()
auv_netcdf.args.verbose = self.args.verbose
auv_netcdf.logger.setLevel(self._log_levels[self.args.verbose])
Expand Down Expand Up @@ -314,7 +315,7 @@ def align(self, mission: str = "", log_file: str = "") -> None:
align_netcdf.commandline = self.commandline
try:
if log_file:
netcdf_dir = align_netcdf.process_cal(log_file=log_file)
netcdf_dir = align_netcdf.process_combined(log_file=log_file)
else:
netcdf_dir = align_netcdf.process_cal()
align_netcdf.write_netcdf(netcdf_dir)
Expand Down Expand Up @@ -977,6 +978,15 @@ def process_command_line(self):
type=int,
help="Number of core processors to use",
)
parser.add_argument(
"--add_seconds",
action="store",
type=int,
help=(
"Add seconds to time variables. Used to correct Dorado log files "
"saved with GPS Week Rollover Bug."
),
)
parser.add_argument(
"-v",
"--verbose",
Expand Down