From ca7e57467db786eb6133f6790caaffce57769c91 Mon Sep 17 00:00:00 2001 From: Mike McCann Date: Sun, 7 Dec 2025 18:29:39 -0800 Subject: [PATCH 1/7] Log all exceptions from each step of process_log_file(). --- src/data/process.py | 44 +++++++++++++++++++++++++++++++++++--------- 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/src/data/process.py b/src/data/process.py index d2d3376..452fffc 100755 --- a/src/data/process.py +++ b/src/data/process.py @@ -106,6 +106,10 @@ def wrapper(self, log_file: str): return func(self, log_file) except (TestMission, FailedMission, EOFError) as e: self.logger.info(str(e)) + except Exception: + # Catch all other exceptions and log full traceback + self.logger.exception("Exception occurred while processing %s", log_file) + raise finally: if hasattr(self, "log_handler"): # Cleanup and archiving logic @@ -545,9 +549,14 @@ def align(self, mission: str = "", log_file: str = "") -> None: netcdf_dir = align_netcdf.process_cal() align_netcdf.write_combined_netcdf(netcdf_dir) except (FileNotFoundError, EOFError) as e: - align_netcdf.logger.error("%s %s", mission, e) # noqa: TRY400 - error_message = f"{mission} {e}" + align_netcdf.logger.error("%s %s", mission or log_file, e) # noqa: TRY400 + error_message = f"{mission or log_file} {e}" raise InvalidCalFile(error_message) from e + except Exception: + align_netcdf.logger.exception( + "Exception occurred during alignment of %s", mission or log_file + ) + raise finally: align_netcdf.logger.removeHandler(self.log_handler) @@ -604,6 +613,11 @@ def resample(self, mission: str = "", log_file: str = "") -> None: resamp.resample_mission(nc_file) except (FileNotFoundError, InvalidAlignFile) as e: self.logger.error("%s %s", nc_file, e) # noqa: TRY400 + except Exception: + resamp.logger.exception( + "Exception occurred during resampling of %s", mission or log_file + ) + raise finally: resamp.logger.removeHandler(self.log_handler) @@ -981,11 +995,17 @@ def extract(self, log_file: str) -> None: 
extract.logger.setLevel(self._log_levels[self.config["verbose"]]) extract.logger.addHandler(self.log_handler) - url = os.path.join(BASE_LRAUV_WEB, log_file) # noqa: PTH118 - output_dir = Path(BASE_LRAUV_PATH, Path(log_file).parent) - extract.logger.info("Downloading %s", url) - input_file = extract.download_with_pooch(url, output_dir) - return extract.extract_groups_to_files_netcdf4(input_file) + try: + url = os.path.join(BASE_LRAUV_WEB, log_file) # noqa: PTH118 + output_dir = Path(BASE_LRAUV_PATH, Path(log_file).parent) + extract.logger.info("Downloading %s", url) + input_file = extract.download_with_pooch(url, output_dir) + return extract.extract_groups_to_files_netcdf4(input_file) + except Exception: + extract.logger.exception("Exception occurred during extraction of %s", log_file) + raise + finally: + extract.logger.removeHandler(self.log_handler) def combine(self, log_file: str) -> None: self.logger.info("Combining netCDF files for log file: %s", log_file) @@ -1002,8 +1022,14 @@ def combine(self, log_file: str) -> None: combine.logger.setLevel(self._log_levels[self.config["verbose"]]) combine.logger.addHandler(self.log_handler) - combine.combine_groups() - combine.write_netcdf() + try: + combine.combine_groups() + combine.write_netcdf() + except Exception: + combine.logger.exception("Exception occurred during combine of %s", log_file) + raise + finally: + combine.logger.removeHandler(self.log_handler) @log_file_processor def process_log_file(self, log_file: str) -> None: From 48d29cc774924f510bafe05703a48a3b49b016c4 Mon Sep 17 00:00:00 2001 From: Mike McCann Date: Mon, 8 Dec 2025 10:50:26 -0800 Subject: [PATCH 2/7] Sort SCIENG_PARMS and add debug statements around filtering code. 
--- src/data/nc42netcdfs.py | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/src/data/nc42netcdfs.py b/src/data/nc42netcdfs.py index e463f34..844df80 100755 --- a/src/data/nc42netcdfs.py +++ b/src/data/nc42netcdfs.py @@ -199,7 +199,7 @@ def extract_groups_to_files_netcdf4(self, log_file: str) -> Path: # Extract all other groups all_groups = list(src_dataset.groups.keys()) - for group_name in SCIENG_PARMS: + for group_name in sorted(SCIENG_PARMS): if group_name == "/" or group_name not in all_groups: if group_name != "/" and group_name not in all_groups: self.logger.warning("Group %s not found in %s", group_name, input_file) @@ -555,11 +555,44 @@ def _process_single_time_coordinate( # noqa: PLR0913 # Get the valid time subset valid_time_data = original_time_data[spike_removed_indices] + self.logger.debug( + "Before monotonic: len(valid_time_data)=%d, len(spike_removed_indices)=%d, " + "type(spike_removed_indices)=%s", + len(valid_time_data), + len(spike_removed_indices), + type(spike_removed_indices).__name__, + ) + self.logger.debug( + "valid_time_data shape/size: %s", + getattr(valid_time_data, "shape", len(valid_time_data)), + ) # Apply monotonic filtering mono_indices_in_filtered = self._get_monotonic_indices(valid_time_data) + self.logger.debug( + "After monotonic: len(mono_indices)=%d, max(mono_indices)=%s, type(mono_indices)=%s", + len(mono_indices_in_filtered), + max(mono_indices_in_filtered) if len(mono_indices_in_filtered) > 0 else "N/A", + type(mono_indices_in_filtered).__name__, + ) # Convert monotonic indices back to original array indices + if len(mono_indices_in_filtered) > 0 and max(mono_indices_in_filtered) >= len( + spike_removed_indices + ): + self.logger.error( + "BUG: monotonic indices out of range! 
max(mono_indices)=%d >= " + "len(spike_removed)=%d", + max(mono_indices_in_filtered), + len(spike_removed_indices), + ) + self.logger.error("mono_indices_in_filtered: %s", mono_indices_in_filtered[:20]) + self.logger.error("spike_removed_indices: %s", spike_removed_indices[:20]) + # Clamp to valid range to prevent crash + mono_indices_in_filtered = [ + i for i in mono_indices_in_filtered if i < len(spike_removed_indices) + ] + final_indices = [spike_removed_indices[i] for i in mono_indices_in_filtered] # Store data for plotting if requested (do this before early return) From 663a88b89be4189f0468d2cede74cdee52c4eb76 Mon Sep 17 00:00:00 2001 From: Mike McCann Date: Mon, 8 Dec 2025 10:51:23 -0800 Subject: [PATCH 3/7] Trap IndexError in nudge_positions with log messages. --- .vscode/launch.json | 17 ++++++++++++++--- src/data/utils.py | 13 +++++++++++-- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 0a71902..86800ff 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -60,7 +60,7 @@ //"args": ["-v", "2", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4"] //"args": ["-v", "2", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--plot_time", "/longitude_time"] //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--plot_universals"] - "args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--plot_time", "/latitude_time"] + //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--plot_time", "/latitude_time"] // brizo 20250916T230652 has several ESP Samples from stoqs_lrauv_sep2025 //"args": ["-v", "2", "--log_file", 
"brizo/missionlogs/2025/20250916_20250922/20250916T230652/202509162306_202509180305.nc4", "--plot_time", "/longitude_time"] //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109_cleaned_by_quinn.nc4", "--plot_time", "/longitude_time"] @@ -69,6 +69,9 @@ //"args": ["-v", "1", "--log_file", "tethys/missionlogs/2012/20120908_20120920/20120917T025522/201209170255_201209171110.nc4", "--plot_time", "/longitude_time"] // Single spike in longitude_time //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--plot_time", "/NAL9602/longitude_fix_time"] + // Fails in nudge_positions - bad time removed by outlier detection, then that index re-appears in longitude_fix_time + //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250911T073640/202509110736_202509120809.nc4", "--plot_universals"] + "args": ["-v", "2", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250911T073640/202509110736_202509120809.nc4", "--plot_time", "/NAL9602/longitude_fix_time"] }, { "name": "2.0 - calibrate.py", @@ -125,11 +128,13 @@ "program": "${workspaceFolder}/src/data/combine.py", "console": "integratedTerminal", "justMyCode": false, - "args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--plot"] + //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--plot"] //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250916_20250922/20250916T230652/202509162306_202509180305.nc4", "--plot"] //"args": ["-v", "1", "--log_file", "tethys/missionlogs/2012/20120908_20120920/20120909T010636/201209090106_201209091521.nc4", "--plot"] // Conflicting sizes for nudged_time and data - fixed by filtering GPS fixes to be monotonically increasing //"args": ["-v", "1", "--log_file", 
"tethys/missionlogs/2012/20120908_20120920/20120917T025522/201209170255_201209171110.nc4", "--plot"] + // Fails in nudge_positions, maybe bad GPS data? + "args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250911T073640/202509110736_202509120809.nc4", "--plot"] }, { @@ -348,7 +353,7 @@ // Lots bad time values in brizo 20250914T080941 due to memory corruption on the vehicle //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4"] //"args": ["-v", "2", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--clobber"] - "args": ["-v", "2", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--clobber", "--no_cleanup"] + //"args": ["-v", "2", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250914T080941/202509140809_202509150109.nc4", "--clobber", "--no_cleanup"] //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250916_20250922/20250916T230652/202509162306_202509180305.nc4", "--no_cleanup"] //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250916_20250922/20250916T230652/202509162306_202509180305.nc4", "--no_cleanup", "--clobber"] // Has different universals time coodinates for longitude/latitude and depth @@ -366,6 +371,12 @@ //"args": ["-v", "1", "--auv_name", "pontus", "--start", "20250601T000000", "--end", "20250721T000000", "--noinput", "--num_cores", "1", "--no_cleanup"] //"args": ["-v", "1", "--auv_name", "pontus", "--start", "20250601T000000", "--end", "20250702T000000", "--noinput", "--num_cores", "1", "--no_cleanup", "--clobber"] //"args": ["-v", "1", "--log_file", "pontus/missionlogs/2025/20250623_20250707/20250707T043011/slate.nc4", "--no_cleanup"] + //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250903_20250903/20250903T202626/202509032026_202509032030.nc4", "--no_cleanup"] + // Does not have ctdseabird data, 
hence no coordinates for resampling //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250903_20250903/20250903T202626/202509032026_202509032030.nc4", "--no_cleanup"] // Fails in nudge_positions, maybe bad GPS data? "args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250911T073640/202509110736_202509120809.nc4", "--no_cleanup"] //"args": ["-v", "1", "--auv_name", "brizo", "--start", "20250901T000000", "--end", "20251001T000000", "--noinput", "--num_cores", "1", "--no_cleanup"] }, ] diff --git a/src/data/utils.py b/src/data/utils.py index c635da1..5c74ae2 100644 --- a/src/data/utils.py +++ b/src/data/utils.py @@ -213,8 +213,17 @@ def nudge_positions( # noqa: C901, PLR0912, PLR0913, PLR0915 end_sec_diff = float(lat_fix.cf["T"].data[i + 1] - lat.cf["T"].data[segi[-1]]) / 1.0e9 - end_lon_diff = float(lon_fix[i + 1]) - float(lon[segi[-1]]) - end_lat_diff = float(lat_fix[i + 1]) - float(lat[segi[-1]]) + try: + end_lon_diff = float(lon_fix[i + 1]) - float(lon[segi[-1]]) + end_lat_diff = float(lat_fix[i + 1]) - float(lat[segi[-1]]) + except IndexError as e: + logger.warning("IndexError computing end_lon_diff/end_lat_diff: %s", e) + logger.info( + "Setting end_lon_diff and end_lat_diff to 0 - error likely due to " + "filtering out bad GPS time data in nc42netcdfs.py" + ) + end_lat_diff = 0 + end_lon_diff = 0 # Compute approximate horizontal drift rate as a sanity check try: From 0bc601ba816e95ca227e5ab1c72d81f0ac72fda6 Mon Sep 17 00:00:00 2001 From: Mike McCann Date: Mon, 8 Dec 2025 10:53:15 -0800 Subject: [PATCH 4/7] Better organize coordinate assignment, ensure that coords have proper standard_names. 
--- src/data/align.py | 117 +++++++++++++++++++++++----------------------- 1 file changed, 59 insertions(+), 58 deletions(-) diff --git a/src/data/align.py b/src/data/align.py index b2d41db..046954a 100755 --- a/src/data/align.py +++ b/src/data/align.py @@ -568,66 +568,67 @@ def process_combined(self) -> Path: # noqa: C901, PLR0912, PLR0915 np.mean(np.diff(self.combined_nc[timevar])) / np.timedelta64(1, "s") ) - time_coord = variable_time_coord_mapping.get(variable) - depth_coord = ( - time_coord[:-5] + "_depth" - if time_coord and time_coord.endswith("_time") - else f"{group_name}_depth" - ) - lat_coord = ( - time_coord[:-5] + "_latitude" - if time_coord and time_coord.endswith("_time") - else f"{group_name}_latitude" - ) - lon_coord = ( - time_coord[:-5] + "_longitude" - if time_coord and time_coord.endswith("_time") - else f"{group_name}_longitude" - ) - - # Add interpolated depth, latitude, and longitude variables - if depth_coord in self.combined_nc: - self.aligned_nc[depth_coord].attrs = self.combined_nc[depth_coord].attrs - self.aligned_nc[depth_coord] = xr.DataArray( - depth_interp(var_time).astype(np.float64).tolist(), - dims={timevar}, - coords=[self.combined_nc[variable].get_index(timevar)], - name=depth_coord, - ) - self.aligned_nc[depth_coord].attrs["long_name"] = "Depth" - self.aligned_nc[depth_coord].attrs["comment"] = "depth from Group_Universals.nc" + # Determine coordinate variable names based on group + depth_coord = f"{group_name}_depth" + lat_coord = f"{group_name}_latitude" + lon_coord = f"{group_name}_longitude" + TINY_SAMPLE_RATE = 10e-2 - self.aligned_nc[depth_coord].attrs["instrument_sample_rate_hz"] = ( + sample_rate_str = ( f"{sample_rate:.2f}" if sample_rate > TINY_SAMPLE_RATE else f"{sample_rate:.6f}" ) - self.aligned_nc[lat_coord] = xr.DataArray( - lat_interp(var_time).astype(np.float64).tolist(), - dims={timevar}, - coords=[self.combined_nc[variable].get_index(timevar)], - name=lat_coord, - ) - self.aligned_nc[lat_coord].attrs = 
self.combined_nc["nudged_latitude"].attrs - self.aligned_nc[lat_coord].attrs["comment"] += ( - f". Variable nudged_latitude linearly" - f" interpolated onto {variable.split('_')[0]} time values." - ) - self.aligned_nc[lat_coord].attrs["long_name"] = "Latitude" - self.aligned_nc[lat_coord].attrs["instrument_sample_rate_hz"] = sample_rate - - self.aligned_nc[lon_coord] = xr.DataArray( - lon_interp(var_time).astype(np.float64).tolist(), - dims={timevar}, - coords=[self.combined_nc[variable].get_index(timevar)], - name=lon_coord, - ) - self.aligned_nc[lon_coord].attrs = self.combined_nc["nudged_longitude"].attrs - self.aligned_nc[lon_coord].attrs["comment"] += ( - f". Variable nudged_longitude linearly" - f" interpolated onto {variable.split('_')[0]} time values." - ) - self.aligned_nc[lon_coord].attrs["long_name"] = "Longitude" - self.aligned_nc[lon_coord].attrs["instrument_sample_rate_hz"] = sample_rate + # Create depth coordinate - only if not already created for this group + if depth_coord not in self.aligned_nc: + self.aligned_nc[depth_coord] = xr.DataArray( + depth_interp(var_time).astype(np.float64).tolist(), + dims={timevar}, + coords=[self.combined_nc[variable].get_index(timevar)], + name=depth_coord, + ) + # Copy attributes from combined_nc if they exist - ensure proper standard_name + if depth_coord in self.combined_nc: + self.aligned_nc[depth_coord].attrs = self.combined_nc[depth_coord].attrs.copy() + self.aligned_nc[depth_coord].attrs["long_name"] = "Depth" + self.aligned_nc[depth_coord].attrs["standard_name"] = "depth" + self.aligned_nc[depth_coord].attrs["comment"] = "depth from Group_Universals.nc" + self.aligned_nc[depth_coord].attrs["instrument_sample_rate_hz"] = sample_rate_str + + # Create latitude coordinate - only if not already created for this group + if lat_coord not in self.aligned_nc: + self.aligned_nc[lat_coord] = xr.DataArray( + lat_interp(var_time).astype(np.float64).tolist(), + dims={timevar}, + 
coords=[self.combined_nc[variable].get_index(timevar)], + name=lat_coord, + ) + self.aligned_nc[lat_coord].attrs = self.combined_nc["nudged_latitude"].attrs.copy() + self.aligned_nc[lat_coord].attrs["comment"] += ( + f". Variable nudged_latitude linearly" + f" interpolated onto {group_name} time values." + ) + # Ensure proper standard_name + self.aligned_nc[lat_coord].attrs["long_name"] = "Latitude" + self.aligned_nc[lat_coord].attrs["standard_name"] = "latitude" + self.aligned_nc[lat_coord].attrs["instrument_sample_rate_hz"] = sample_rate_str + + # Create longitude coordinate - only if not already created for this group + if lon_coord not in self.aligned_nc: + self.aligned_nc[lon_coord] = xr.DataArray( + lon_interp(var_time).astype(np.float64).tolist(), + dims={timevar}, + coords=[self.combined_nc[variable].get_index(timevar)], + name=lon_coord, + ) + self.aligned_nc[lon_coord].attrs = self.combined_nc["nudged_longitude"].attrs.copy() + self.aligned_nc[lon_coord].attrs["comment"] += ( + f". Variable nudged_longitude linearly" + f" interpolated onto {group_name} time values." 
+ ) + # Ensure proper standard_name + self.aligned_nc[lon_coord].attrs["long_name"] = "Longitude" + self.aligned_nc[lon_coord].attrs["standard_name"] = "longitude" + self.aligned_nc[lon_coord].attrs["instrument_sample_rate_hz"] = sample_rate_str # Update spatial temporal bounds for global metadata if pd.to_datetime(self.aligned_nc[timevar][0].values).tz_localize(UTC) < pd.to_datetime( @@ -661,13 +662,13 @@ def process_combined(self) -> Path: # noqa: C901, PLR0912, PLR0915 ) self.aligned_nc[variable].attrs = self.combined_nc[variable].attrs if ( - time_coord in self.aligned_nc + timevar in self.aligned_nc and depth_coord in self.aligned_nc and lat_coord in self.aligned_nc and lon_coord in self.aligned_nc ): self.aligned_nc[variable].attrs["coordinates"] = ( - f"{time_coord} {depth_coord} {lat_coord} {lon_coord}" + f"{timevar} {depth_coord} {lat_coord} {lon_coord}" ) else: self.logger.info("Skipping setting coordinates attribute for %s", variable) From a2173a49d6ae3109cb5e59eff407f4d821fd9424 Mon Sep 17 00:00:00 2001 From: Mike McCann Date: Mon, 8 Dec 2025 10:54:08 -0800 Subject: [PATCH 5/7] Rename resample_mission() to resample_align_file() as it's also used for LRAUV log_files. 
--- src/data/process.py | 2 +- src/data/resample.py | 13 +++++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/data/process.py b/src/data/process.py index 452fffc..cd947ef 100755 --- a/src/data/process.py +++ b/src/data/process.py @@ -610,7 +610,7 @@ def resample(self, mission: str = "", log_file: str = "") -> None: raise ValueError(error_message) subprocess.run([wget_path, dap_file_str, "-O", nc_file_str], check=True) # noqa: S603 try: - resamp.resample_mission(nc_file) + resamp.resample_align_file(nc_file) except (FileNotFoundError, InvalidAlignFile) as e: self.logger.error("%s %s", nc_file, e) # noqa: TRY400 except Exception: diff --git a/src/data/resample.py b/src/data/resample.py index fb8bd5c..424a0be 100755 --- a/src/data/resample.py +++ b/src/data/resample.py @@ -1769,7 +1769,7 @@ def get_mission_start_end( ) return mission_start, mission_end, instrs_to_pad - def resample_mission( # noqa: C901, PLR0912, PLR0915, PLR0913 + def resample_align_file( # noqa: C901, PLR0912, PLR0915, PLR0913 self, nc_file: str, # align.nc file mf_width: int = MF_WIDTH, @@ -1785,6 +1785,7 @@ def resample_mission( # noqa: C901, PLR0912, PLR0915, PLR0913 self.ds = xr.open_dataset(nc_file) mission_start, mission_end, instrs_to_pad = self.get_mission_start_end(nc_file) last_instr = "" + pitch_corrected_instr = "" for icount, (instr, variables) in enumerate( self.instruments_variables(nc_file).items(), ): @@ -1897,6 +1898,14 @@ def resample_mission( # noqa: C901, PLR0912, PLR0915, PLR0913 if self.plot: self.plot_variable(instr, variable, freq, plot_seconds) + if not pitch_corrected_instr: + self.logger.warning( + "No pitch corrected CTD instrument found for resampling in file %s" + " - no output file created", + nc_file, + ) + return + # Call vehicle-specific metadata method which will call _build_global_metadata() if self.auv_name.lower() == "dorado": self.resampled_nc.attrs = self.dorado_global_metadata() @@ -1990,7 +1999,7 @@ def 
process_command_line(self): p_start = time.time() # Everything that Resampler needs should be in the self described nc_file # whether it is Dorado/i2MAP or LRAUV - resamp.resample_mission( + resamp.resample_align_file( nc_file, mf_width=resamp.args.mf_width, freq=resamp.args.freq, From aaf56061ce30fc457e8455f46922d627167c0dcc Mon Sep 17 00:00:00 2001 From: Mike McCann Date: Mon, 8 Dec 2025 11:06:33 -0800 Subject: [PATCH 6/7] Processing brizo for September 2025 succeeds on local dev machine. --- .vscode/launch.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 86800ff..3e4f8f9 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -375,8 +375,8 @@ // Does not have ctdseabird data, hence no coordinates for resampling //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250903_20250903/20250903T202626/202509032026_202509032030.nc4", "--no_cleanup"] // Fails in nudge_positions, maybe bad GPS data? - "args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250911T073640/202509110736_202509120809.nc4", "--no_cleanup"] - //"args": ["-v", "1", "--auv_name", "brizo", "--start", "20250901T000000", "--end", "20251001T000000", "--noinput", "--num_cores", "1", "--no_cleanup"] + //"args": ["-v", "1", "--log_file", "brizo/missionlogs/2025/20250909_20250915/20250911T073640/202509110736_202509120809.nc4", "--no_cleanup"] + "args": ["-v", "1", "--auv_name", "brizo", "--start", "20250901T000000", "--end", "20251001T000000", "--noinput", "--num_cores", "1", "--no_cleanup"] }, ] From 61cc96f95647e53214cd7a8aa716b0d507b800b6 Mon Sep 17 00:00:00 2001 From: Mike McCann Date: Mon, 8 Dec 2025 11:11:19 -0800 Subject: [PATCH 7/7] Update EXPECTED_MD5_GITHUB. 
--- src/data/test_process_dorado.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/data/test_process_dorado.py b/src/data/test_process_dorado.py index 88217bb..35520db 100644 --- a/src/data/test_process_dorado.py +++ b/src/data/test_process_dorado.py @@ -50,7 +50,7 @@ def test_process_dorado(complete_dorado_processing): check_md5 = True if check_md5: # Check that the MD5 hash has not changed - EXPECTED_MD5_GITHUB = "fec067d16eb5280a8bc2b6ef132821b8" + EXPECTED_MD5_GITHUB = "432690b72faf604e9845cfe6c3eb5b3e" EXPECTED_MD5_ACT = "316955fd489862ad9ed5b63df8aa7db8" EXPECTED_MD5_LOCAL = "f635cee8760aa0f40bd2070bc0c5fa65" if str(proc.args.base_path).startswith("/home/runner"):