Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 1 addition & 25 deletions src/data/AUV.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,8 @@
"""

import logging
from datetime import UTC, datetime
from datetime import datetime

import coards
import numpy as np
import xarray as xr

Expand All @@ -28,29 +27,6 @@ def monotonic_increasing_time_indices(time_array: np.array) -> np.ndarray:
return np.array(monotonic)


class AUV:
    """Base class that writes common global metadata into an AUV NetCDF file."""

    def add_global_metadata(self):
        """Set standard global attributes on ``self.nc_file``.

        Writes CF-convention identifiers, creation/update timestamps,
        trajectory feature type, time-coverage bounds decoded from
        ``self.time``, and MBARI's distribution statement.
        Assumes ``self.nc_file`` is an open writable NetCDF dataset and
        ``self.time`` carries a udunits-style ``units`` attribute -- TODO confirm.
        """
        # One timestamp shared by all date_* attributes; isoformat() plus a
        # literal "Z" reproduces the file's existing timestamp convention.
        stamp = datetime.now(UTC).isoformat() + "Z"

        # Fixed attributes, written in the same order as before so the
        # attribute ordering in the output file is unchanged.
        fixed = (
            ("netcdf_version", "4"),
            ("Conventions", "CF-1.6"),
            ("date_created", stamp),
            ("date_update", stamp),
            ("date_modified", stamp),
            ("featureType", "trajectory"),
            ("comment", ""),
        )
        for attr_name, attr_value in fixed:
            setattr(self.nc_file, attr_name, attr_value)

        # Coverage bounds come from the first and last time values,
        # decoded from their udunits epoch by coards.
        units = self.time.units
        start = coards.from_udunits(self.time[0], units)
        end = coards.from_udunits(self.time[-1], units)
        self.nc_file.time_coverage_start = start.isoformat() + "Z"
        self.nc_file.time_coverage_end = end.isoformat() + "Z"

        self.nc_file.distribution_statement = "Any use requires prior approval from MBARI"


def nudge_positions( # noqa: C901, PLR0912, PLR0913, PLR0915
nav_longitude: xr.DataArray,
nav_latitude: xr.DataArray,
Expand Down
22 changes: 16 additions & 6 deletions src/data/combine.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,12 +181,12 @@ def global_metadata(self):
metadata["license"] = metadata["distribution_statement"]
metadata["useconst"] = "Not intended for legal use. Data may contain inaccuracies."
metadata["history"] = f"Created by {self.commandline} on {iso_now}"

log_file = self.args.log_file
metadata["title"] = (
f"Calibrated AUV sensor data from {self.args.auv_name} mission {self.args.mission}"
f"Combined LRAUV data from {log_file} - relevant variables extracted for STOQS"
)
metadata["summary"] = (
"Observational oceanographic data obtained from an Autonomous"
"Observational oceanographic data obtained from a Long Range Autonomous"
" Underwater Vehicle mission with measurements at"
" original sampling intervals. The data have been processed"
" by MBARI's auv-python software."
Expand Down Expand Up @@ -557,13 +557,23 @@ def combine_groups(self):
group_files = sorted(src_dir.glob(f"{Path(log_file).stem}_{GROUP}_*.nc"))
self.combined_nc = xr.Dataset()
for group_file in group_files:
self.logger.info("Found group file: %s", group_file)
self.logger.info("Group file: %s", group_file.name)
# TODO: call nudged_longitude, nudged_latitude = self._nudge_pos() when appropriate
# Loop through each variable in the group file and add it to the combined_nc member list
with xr.open_dataset(group_file) as ds:
for orig_var in ds.variables:
if orig_var.lower().endswith("time"):
self.logger.debug("Skipping time variable: %s", orig_var)
continue
new_group = group_file.stem.split(f"{GROUP}_")[1].replace("_", "").lower()
new_var = new_group + "_" + orig_var.lower()
self.logger.info("Adding variable %-65s %s", f"{orig_var} as", new_var)
self.combined_nc[new_var] = ds[orig_var]

def write_netcdf(self) -> None:
log_file = self.args.log_file
netcdfs_dir = Path(BASE_LRAUV_PATH, Path(log_file).parent)
out_fn = Path(netcdfs_dir, f"{self.args.log_file.stem}_cal.nc")
out_fn = Path(netcdfs_dir, f"{Path(log_file).stem}_cal.nc")

self.combined_nc.attrs = self.global_metadata()
self.logger.info("Writing combined group data to %s", out_fn)
Expand Down Expand Up @@ -641,5 +651,5 @@ def process_command_line(self):
combine.process_command_line()
start = time.time()
combine.combine_groups()
##combine.write_netcdf()
combine.write_netcdf()
combine.logger.info("Time to process: %.2f seconds", (time.time() - start))
3 changes: 1 addition & 2 deletions src/data/correct_log_times.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
from pathlib import Path
from shutil import copyfile

from AUV import AUV
from logs2netcdfs import AUV_NetCDF
from readauvlog import log_record

Expand All @@ -41,7 +40,7 @@
TIME = "time"


class TimeCorrect(AUV):
class TimeCorrect:
logger = logging.getLogger(__name__)
_handler = logging.StreamHandler()
_handler.setFormatter(AUV_NetCDF._formatter)
Expand Down
27 changes: 25 additions & 2 deletions src/data/logs2netcdfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,17 @@
import subprocess
import sys
import time
from datetime import UTC, datetime
from http import HTTPStatus
from pathlib import Path

import aiofiles
import coards
import numpy as np
import requests
from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from AUV import AUV, monotonic_increasing_time_indices
from AUV import monotonic_increasing_time_indices
from netCDF4 import Dataset
from readauvlog import log_record

Expand Down Expand Up @@ -57,7 +59,7 @@ class CustomException(Exception):
pass


class AUV_NetCDF(AUV):
class AUV_NetCDF:
logger = logging.getLogger(__name__)
_handler = logging.StreamHandler()
_formatter = logging.Formatter(
Expand Down Expand Up @@ -662,6 +664,27 @@ def _remove_bad_values(self, netcdf_filename):
self.nc_file.close()
self.logger.info("Wrote (without bad values) %s", netcdf_filename)

def add_global_metadata(self):
    """Attach the standard global attributes to ``self.nc_file``.

    Sets CF-convention identifiers, creation/update timestamps, the
    trajectory feature type, time-coverage bounds derived from
    ``self.time``, and MBARI's distribution statement.
    Assumes ``self.nc_file`` is an open writable NetCDF dataset and
    ``self.time`` has a udunits-style ``units`` attribute -- TODO confirm.
    """
    # Single UTC timestamp reused for every date_* attribute; the appended
    # "Z" matches the existing timestamp convention of these files.
    stamp = datetime.now(UTC).isoformat() + "Z"

    nc = self.nc_file
    nc.netcdf_version = "4"
    nc.Conventions = "CF-1.6"
    nc.date_created = stamp
    nc.date_update = stamp
    nc.date_modified = stamp
    nc.featureType = "trajectory"
    nc.comment = ""

    # Decode the first and last time values (udunits epoch) into ISO strings.
    units = self.time.units
    nc.time_coverage_start = coards.from_udunits(self.time[0], units).isoformat() + "Z"
    nc.time_coverage_end = coards.from_udunits(self.time[-1], units).isoformat() + "Z"

    nc.distribution_statement = "Any use requires prior approval from MBARI"

def _process_log_file(self, log_filename, netcdf_filename, src_dir=None):
log_data = self.read(log_filename)
if Path(netcdf_filename).exists():
Expand Down
Loading