Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@
//"args": ["--auv_name", "dorado", "--mission", "2017.044.00", "--noinput", "-v"]
//"args": ["--auv_name", "i2map", "--mission", "2019.157.02", "--noinput", "-v", "1"]
//"args": ["--auv_name", "dorado", "--mission", "2010.265.00", "--noinput", "-v"]
"args": ["--auv_name", "dorado", "--mission", "2023.324.00", "--noinput", "-v", "--vehicle_dir", "/Volumes/AUVCTD/missionlogs"]
//"args": ["--auv_name", "dorado", "--mission", "2023.324.00", "--noinput", "-v", "--vehicle_dir", "/Volumes/AUVCTD/missionlogs"]
// Mission suffering from GPS Rollover bug. Add 1024 * 7 * 24 * 3600 = 619315200 seconds
"args": ["--auv_name", "dorado", "--mission", "2025.316.02", "--noinput", "-v", "--vehicle_dir", "/Volumes/AUVCTD/missionlogs", "--add_seconds", "619315200" ]
},
{
"name": "1.1 - lopcToNetCDF",
Expand Down Expand Up @@ -271,7 +273,8 @@
//"args": ["-v", "1", "--noinput", "--no_cleanup", "--mission", "2008.010.10"]
//"args": ["-v", "2", "--mission", "2004.029.03", "--noinput", "--no_cleanup"],
//"args": ["-v", "1", "--mission", "2023.192.01", "--noinput", "--no_cleanup"],
"args": ["-v", "1", "--mission", "2010.151.04", "--noinput", "--no_cleanup", "--clobber"],
//"args": ["-v", "1", "--mission", "2010.151.04", "--noinput", "--no_cleanup", "--clobber"],
"args": ["-v", "1", "--mission", "2025.316.02", "--noinput", "--no_cleanup", "--add_seconds", "619315200"],

},
{
Expand Down
2 changes: 2 additions & 0 deletions src/data/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ def complete_dorado_processing():
ns.no_cleanup = True
ns.skip_download_process = False
ns.num_cores = 1
ns.add_seconds = None
ns.verbose = 1
proc.args = ns
proc.process_missions(TEST_START_YEAR)
Expand Down Expand Up @@ -147,6 +148,7 @@ def complete_i2map_processing():
ns.skip_download_process = False
ns.last_n_days = 0
ns.num_cores = 1
ns.add_seconds = None
ns.verbose = 1
proc.args = ns
proc.process_missions(TEST_START_YEAR)
Expand Down
40 changes: 19 additions & 21 deletions src/data/create_products.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,28 +88,26 @@ class CreateProducts:
}

def _open_ds(self):
if self.args.local:
local_nc = Path(
BASE_PATH,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
f"{self.args.auv_name}_{self.args.mission}_{FREQ}.nc",
)
local_nc = Path(
BASE_PATH,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
f"{self.args.auv_name}_{self.args.mission}_{FREQ}.nc",
)
# Requires mission to have been processed and archived to AUVCTD
dap_url = os.path.join( # noqa: PTH118
AUVCTD_OPENDAP_BASE,
"surveys",
self.args.mission.split(".")[0],
"netcdf",
f"{self.args.auv_name}_{self.args.mission}_{FREQ}.nc",
)
try:
self.ds = xr.open_dataset(dap_url)
except OSError:
self.logger.debug("%s not available yet", dap_url)
self.ds = xr.open_dataset(local_nc)
else:
# Requires mission to have been processed and archived to AUVCTD
dap_url = os.path.join( # noqa: PTH118
AUVCTD_OPENDAP_BASE,
"surveys",
self.args.mission.split(".")[0],
"netcdf",
f"{self.args.auv_name}_{self.args.mission}_{FREQ}.nc",
)
try:
self.ds = xr.open_dataset(dap_url)
except OSError as err:
self.logger.error("Error opening %s: %s", dap_url, err) # noqa: TRY400

def _grid_dims(self) -> tuple:
# From Matlab code in plot_sections.m:
Expand Down
8 changes: 8 additions & 0 deletions src/data/dorado_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -2954,3 +2954,11 @@
" - ctdToUse = ctd2 "
),
}
# MBTS mission recorded with the GPS Week Rollover bug: the vehicle clock was
# one 1024-week GPS epoch behind, so 1024 * 7 * 24 * 3600 = 619315200 seconds
# were added to the timeTag values during processing (see --add_seconds).
dorado_info["2025.316.02"] = {
    # MBTSLINE is already a string; the previous f"{MBTSLINE}" wrapper was redundant
    "program": MBTSLINE,
    "comment": (
        "Monterey Bay MBTS Mission - 31625G"
        " ISUS, and LISST payloads removed, main vehicle computer NTP synced with GPS Week Rollover Bug, 1024*7*24*3600 seconds added to timestamps. "
        " - ctdToUse = ctd2 "
    ),
}
46 changes: 25 additions & 21 deletions src/data/gulper.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,27 +32,31 @@ def mission_start_esecs(self) -> float:
return self.args.start_esecs

# Get the first time record from mission's navigation.nc file
if self.args.local:
url = Path(
self.args.base_path,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
"navigation.nc",
)
else:
# Relies on auv-python having processed the mission
url = os.path.join( # noqa: PTH118
"http://dods.mbari.org/opendap/data/auvctd/",
MISSIONNETCDFS,
self.args.mission.split(".")[0],
self.args.mission.split(".")[0] + self.args.mission.split(".")[1],
self.args.mission,
"navigation.nc",
)
self.logger.info("Reading mission start time from %s", url)
ds = xr.open_dataset(url)
return ds.time[0].to_numpy().astype("float64") / 1e9
file_path = Path(
self.args.base_path,
self.args.auv_name,
MISSIONNETCDFS,
self.args.mission,
"navigation.nc",
)
# Relies on auv-python having processed the mission
url = os.path.join( # noqa: PTH118
"http://dods.mbari.org/opendap/data/auvctd/",
MISSIONNETCDFS,
self.args.mission.split(".")[0],
self.args.mission.split(".")[0] + self.args.mission.split(".")[1],
self.args.mission,
"navigation.nc",
)
try:
self.logger.info("Reading mission start time from url = %s", url)
ds = xr.open_dataset(url, decode_times=False)
return ds.time[0].to_numpy().astype("float64") / 1e9
except OSError:
self.logger.info("%s not available yet", url)
self.logger.info("Reading mission start time from file_path = %s", file_path)
ds = xr.open_dataset(file_path, decode_times=False)
return ds.time[0].to_numpy().astype("float64")

def parse_gulpers(self, sec_delay: int = 1) -> dict: # noqa: C901, PLR0912, PLR0915
"""Parse the Gulper times and bottle numbers from the auvctd syslog file.
Expand Down
44 changes: 41 additions & 3 deletions src/data/logs2netcdfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -552,8 +552,36 @@ def _create_variable( # noqa: PLR0913
self.logger.error("data seriously does not match shape") # noqa: TRY400
raise

def correct_times(self, log_data, add_seconds: int = 0):
    """Return a copy of log_data with add_seconds added to every timeTag value.

    Used to repair missions recorded with the GPS Week Rollover bug, where
    the vehicle clock is a whole number of 1024-week GPS epochs behind
    (e.g. 1024 * 7 * 24 * 3600 = 619315200 seconds for one epoch).
    Non-timeTag variables are passed through unchanged.

    Args:
        log_data: sequence of log_record variables parsed from a mission log
        add_seconds: offset to add to each timeTag data value

    Returns:
        A new list of log_record objects; timeTag variables get corrected
        data lists, all other variables are the original objects.
    """
    if not add_seconds:
        # Nothing to correct with the default of 0 - avoid rebuilding
        # every timeTag list and logging a misleading "Adding 0 seconds"
        return log_data
    new_log_data = []
    for variable in log_data:
        if variable.data_type == "timeTag":
            self.logger.info(
                "Adding to time values: %d seconds",
                add_seconds,
            )
            # Build a fresh log_record so the original data are not mutated
            new_log_data.append(
                log_record(
                    variable.data_type,
                    variable.short_name,
                    variable.long_name,
                    variable.units,
                    variable.instrument_name,
                    [tv + add_seconds for tv in variable.data],
                )
            )
        else:
            # Non-time variables are reused as-is
            new_log_data.append(variable)
    return new_log_data

def write_variables(self, log_data, netcdf_filename):
log_data = self._correct_dup_short_names(log_data)
if self.args.mission == "2025.316.02" and self.args.add_seconds:
# So far only this mission is known to suffer from GPS Week Rollover bug
log_data = self.correct_times(log_data, self.args.add_seconds)
self.nc_file.createDimension(TIME, len(log_data[0].data))
for variable in log_data:
self.logger.debug(
Expand Down Expand Up @@ -682,6 +710,10 @@ def _process_log_file(self, log_filename, netcdf_filename, src_dir=None):
self.nc_file.summary = SUMMARY_SOURCE.format(src_dir)
if hasattr(self.args, "summary") and self.args.summary:
self.nc_file.summary = self.args.summary
if self.args.add_seconds:
self.nc_file.summary += (
f". Corrected timeTag variables by adding {self.args.add_seconds} seconds. "
)
monotonic = monotonic_increasing_time_indices(self.nc_file["time"][:])
if (~monotonic).any():
self.logger.info(
Expand Down Expand Up @@ -930,6 +962,14 @@ def process_command_line(self):
action="store",
help="Directory for the vehicle's mission logs, e.g.: /Volumes/AUVCTD/missionlogs",
)
parser.add_argument(
# To use for mission 2025.316.02 which suffered from the GPS week rollover bug:
# 1024 * 7 * 24 * 3600 = 619315200 seconds to add to timeTag variables in the log_data
"--add_seconds",
type=int,
default=0,
help="Seconds to add to timeTag in log data",
)
parser.add_argument(
"-v",
"--verbose",
Expand Down Expand Up @@ -965,10 +1005,8 @@ def process_command_line(self):
else:
raise argparse.ArgumentError(
None,
"Must provide --src_dir with --auv_name & --mission",
"Must provide --vehicle_dir with --auv_name & --mission",
)

auv_netcdf.download_process_logs(src_dir=Path())
elif auv_netcdf.args.start and auv_netcdf.args.end:
auv_netcdf._deployments_between()
else:
Expand Down
10 changes: 10 additions & 0 deletions src/data/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,7 @@ def download_process(self, mission: str, src_dir: str) -> None:
auv_netcdf.args.auv_name = self.vehicle
auv_netcdf.args.mission = mission
auv_netcdf.args.use_portal = self.args.use_portal
auv_netcdf.args.add_seconds = self.args.add_seconds
auv_netcdf.set_portal()
auv_netcdf.args.verbose = self.args.verbose
auv_netcdf.logger.setLevel(self._log_levels[self.args.verbose])
Expand Down Expand Up @@ -793,6 +794,15 @@ def process_command_line(self):
type=int,
help="Number of core processors to use",
)
parser.add_argument(
"--add_seconds",
action="store",
type=int,
help=(
"Add seconds to time variables. Used to correct Dorado log files "
"saved with GPS Week Rollover Bug."
),
)
parser.add_argument(
"-v",
"--verbose",
Expand Down
4 changes: 2 additions & 2 deletions src/data/test_process_dorado.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def test_process_dorado(complete_dorado_processing):
# but it will alert us if a code change unexpectedly changes the file size.
# If code changes are expected to change the file size then we should
# update the expected size here.
EXPECTED_SIZE_GITHUB = 621298
EXPECTED_SIZE_GITHUB = 621286
EXPECTED_SIZE_ACT = 621298
EXPECTED_SIZE_LOCAL = 621286
if str(proc.args.base_path).startswith("/home/runner"):
Expand All @@ -50,7 +50,7 @@ def test_process_dorado(complete_dorado_processing):
check_md5 = True
if check_md5:
# Check that the MD5 hash has not changed
EXPECTED_MD5_GITHUB = "6550bb8ed5919f21413f30dfffdcf116"
EXPECTED_MD5_GITHUB = "9f3f9e2e5abed08692ddb233dec0d0ac"
EXPECTED_MD5_ACT = "bdb9473e5dedb694618f518b8cf0ca1e"
EXPECTED_MD5_LOCAL = "6ecb2229b00835055619e982fe9d5023"
if str(proc.args.base_path).startswith("/home/runner"):
Expand Down
2 changes: 1 addition & 1 deletion src/data/test_process_i2map.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def test_process_i2map(complete_i2map_processing):
# but it will alert us if a code change unexpectedly changes the file size.
# If code changes are expected to change the file size then we should
# update the expected size here.
EXPECTED_SIZE_GITHUB = 58839
EXPECTED_SIZE_GITHUB = 58832
EXPECTED_SIZE_ACT = 58816
EXPECTED_SIZE_LOCAL = 58884
if str(proc.args.base_path).startswith("/home/runner"):
Expand Down