Minor cleanups and fixes
Signed-off-by: David Gilligan-Cook <dcook@imageworks.com>
dcookspi committed Feb 14, 2024
1 parent d1f425f commit aa1fb04
Showing 4 changed files with 12 additions and 3 deletions.
crates/spfs-cli/main/src/cmd_run.rs (1 addition, 1 deletion)
@@ -258,7 +258,7 @@ impl CmdRun {
         // These are added in reverse order so that the ones
         // specified later on the command line will take precedence.
         for (key, value) in data.into_iter().rev() {
-            tracing::trace!("ex data being added: {key}: {value}");
+            tracing::trace!("external data being added: {key}: {value}");
             runtime
                 .add_external_data(key, value, config.filesystem.external_data_size_limit)
                 .await?;
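The comment in this hunk explains the `.rev()`: entries are added in reverse so that values given later on the command line win. A minimal standalone sketch of that precedence rule, assuming the runtime keeps the first value added for each key (the `HashMap` below is only a stand-in for `add_external_data`, not the spfs API):

    use std::collections::HashMap;

    fn main() {
        // Command-line order: "second" was specified after "first".
        let data = vec![("KEY", "first"), ("KEY", "second")];
        let mut store: HashMap<&str, &str> = HashMap::new();
        // Iterate in reverse so later entries are added first.
        for (key, value) in data.into_iter().rev() {
            store.entry(key).or_insert(value); // assumed: first insert wins
        }
        assert_eq!(store["KEY"], "second"); // the later entry takes precedence
    }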
crates/spfs/src/runtime/storage.rs (2 additions, 1 deletion)
@@ -1315,7 +1315,8 @@ impl Storage {
                 }
             }
             _ => {
-                // None of the other objects contain ExternalDataLayers
+                // None of the other objects could contain
+                // pieces of ExternalData
             }
         }
     }
crates/spfs/src/sync.rs (1 addition, 1 deletion)
@@ -193,7 +193,7 @@ where
 
     /// Sync one environment item and any associated data.
     pub async fn sync_env_item(&self, item: tracking::EnvSpecItem) -> Result<SyncEnvItemResult> {
-        tracing::debug!(?item, "Syncing item: {item}");
+        tracing::debug!(?item, "Syncing item");
         self.reporter.visit_env_item(&item);
         let res = match item {
             tracking::EnvSpecItem::Digest(digest) => self
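For context on this change: with the `tracing` macros, `?item` already records the value as a structured field using its `Debug` impl, so interpolating `{item}` into the message text duplicated the same information. A small sketch of the field syntax (not spfs code; assumes the `tracing` and `tracing-subscriber` crates):

    fn main() {
        // Emit events at DEBUG level and above.
        tracing_subscriber::fmt()
            .with_max_level(tracing::Level::DEBUG)
            .init();

        let item = "run/abc123"; // hypothetical stand-in for an EnvSpecItem
        // `?item` attaches item as a Debug-formatted field; the message stays short.
        tracing::debug!(?item, "Syncing item");
        // Output is roughly: DEBUG Syncing item item="run/abc123"
    }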
crates/spk-storage/src/storage/archive.rs (8 additions)
@@ -27,19 +27,22 @@ where
             _ => tracing::warn!("Error trying to remove old file: {:?}", err),
         }
     }
+
     filename
         .parent()
         .map(|dir| {
             std::fs::create_dir_all(dir)
                 .map_err(|err| Error::DirectoryCreateError(dir.to_owned(), err))
         })
         .unwrap_or_else(|| Ok(()))?;
+
     // Don't require the "origin" repo to exist here.
     let (local_repo, remote_repo) = tokio::join!(
         super::local_repository(),
         super::remote_repository("origin"),
     );
     let local_repo = local_repo?;
+
     let tar_repo = spfs::storage::tar::TarRepository::create(&filename)
         .await
         .map_err(|source| spfs::Error::FailedToOpenRepository {
@@ -49,10 +49,12 @@
     // Package exports should not include the top-level directory for
     // durable runtime upperdir edits.
     tar_repo.remove_durable_dir().await?;
+
     let mut target_repo = super::SpfsRepository::try_from((
         "archive",
         spfs::storage::RepositoryHandle::from(tar_repo),
     ))?;
+
     // these are sorted to ensure that the recipe is published
     // before any build - it's only an error in testing, but still best practice
     let mut to_transfer = std::collections::BTreeSet::new();
@@ -80,6 +85,7 @@
     } else {
         to_transfer.insert(pkg.with_build(None));
     }
+
     for transfer_pkg in to_transfer.into_iter() {
         if transfer_pkg.is_embedded() {
             // Don't attempt to export an embedded package; the stub
@@ -160,6 +166,7 @@
         .or(remote_err)
         .unwrap_or_else(|| Error::PackageNotFound(transfer_pkg)));
     }
+
     tracing::info!(path=?filename, "building archive");
     use std::ops::DerefMut;
     if let spfs::storage::RepositoryHandle::Tar(tar) = target_repo.deref_mut() {
@@ -199,6 +206,7 @@ async fn copy_package(
 ) -> Result<()> {
     let spec = src_repo.read_package(pkg).await?;
     let components = src_repo.read_components(pkg).await?;
+    tracing::info!(%pkg, "exporting");
     let syncer = spfs::Syncer::new(src_repo, dst_repo)
         .with_reporter(spfs::sync::ConsoleSyncReporter::default());
     let desired = components.iter().map(|i| *i.1).collect();
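One pattern worth noting in the first hunk above: `tokio::join!` opens the local and remote repositories concurrently, but only the local result is unwrapped right away, which is what lets an export proceed when the "origin" remote is not configured. A rough sketch of that shape, with made-up `open_local`/`open_remote` helpers standing in for `super::local_repository()` and `super::remote_repository("origin")` (assumes the `tokio` crate with the `macros` and `rt-multi-thread` features):

    async fn open_local() -> Result<String, String> {
        Ok("local repo".to_string())
    }

    async fn open_remote(name: &str) -> Result<String, String> {
        Err(format!("remote {name} is not configured"))
    }

    #[tokio::main]
    async fn main() {
        // Both futures run concurrently; an error in one does not abort the join.
        let (local, remote) = tokio::join!(open_local(), open_remote("origin"));

        // Only the local repository is required up front.
        let local = local.expect("a local repository must exist");

        // The remote result stays a Result and is only consulted if needed later,
        // so a missing "origin" repo is not an error at this point.
        println!("local = {local}, remote ok = {}", remote.is_ok());
    }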
