Merge pull request #496 from amazonlinux/recombine-tough
tough: Merge in tough_schema
iliana authored Nov 8, 2019
2 parents cb17aef + 7a78939 commit 0fc51ed
Showing 30 changed files with 87 additions and 127 deletions.
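For downstream consumers such as tuftool (diffs below), the practical effect is that the standalone tough_schema crate goes away and the same types are re-exported from the new tough::schema module of the tough crate. A minimal sketch of the import change, using only paths that appear in this diff; the helper function and the serde_json dependency are illustrative, not part of the commit:

// Before this commit: schema types came from the separate tough_schema crate.
// use tough_schema::{Root, Signed};

// After this commit: the same types live under tough's public schema module.
use tough::schema::{Root, Signed};

// Illustrative helper: Signed<Root> still implements Deserialize, so parsing
// is unchanged; only the import path moves.
fn parse_root(json: &str) -> Result<Signed<Root>, serde_json::Error> {
    serde_json::from_str(json)
}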
20 changes: 3 additions & 17 deletions workspaces/Cargo.lock

(Generated file; diff not rendered.)

1 change: 0 additions & 1 deletion workspaces/Cargo.toml
@@ -26,7 +26,6 @@ members = [
     "updater/block-party",
     "updater/signpost",
     "updater/tough",
-    "updater/tough_schema",
     "updater/update_metadata",
     "updater/updog",

1 change: 0 additions & 1 deletion workspaces/deny.toml
@@ -50,7 +50,6 @@ skip = [
     { name = "testmigration", licenses = [] },
     { name = "thar-be-settings", licenses = [] },
     { name = "tough", licenses = [] },
-    { name = "tough_schema", licenses = [] },
     { name = "tuftool", licenses = [] },
     { name = "update_metadata", licenses = [] },
     { name = "updog", licenses = [] },
1 change: 0 additions & 1 deletion workspaces/tuftool/Cargo.toml
@@ -30,7 +30,6 @@ sha2 = "0.8.0"
 snafu = { version = "0.5.0", features = ["backtrace-crate"] }
 structopt = "0.3"
 tempfile = "3.1.0"
-tough_schema = { path = "../updater/tough_schema" }
 url = "2.1.0"
 walkdir = "2.2.9"
 tempdir = "0.3.7"
7 changes: 3 additions & 4 deletions workspaces/tuftool/src/create.rs
@@ -15,10 +15,9 @@ use std::fs::File;
 use std::num::{NonZeroU64, NonZeroUsize};
 use std::path::{Path, PathBuf};
 use structopt::StructOpt;
-use tough_schema::decoded::Decoded;
-use tough_schema::{
-    Hashes, Role, RoleType, Root, Signed, Snapshot, SnapshotMeta, Target, Targets, Timestamp,
-    TimestampMeta,
+use tough::schema::{
+    decoded::Decoded, Hashes, Role, RoleType, Root, Signed, Snapshot, SnapshotMeta, Target,
+    Targets, Timestamp, TimestampMeta,
 };
 use walkdir::WalkDir;

2 changes: 1 addition & 1 deletion workspaces/tuftool/src/error.rs
@@ -139,7 +139,7 @@ pub(crate) enum Error {
     #[snafu(display("Failed to calculate key ID: {}", source))]
     KeyId {
         #[snafu(backtrace)]
-        source: tough_schema::Error,
+        source: tough::schema::Error,
     },

     #[snafu(display("Private key rejected: {}", source))]
8 changes: 4 additions & 4 deletions workspaces/tuftool/src/key.rs
@@ -6,9 +6,9 @@ use ring::signature::{KeyPair as _, RsaKeyPair};
 use serde::Serialize;
 use snafu::ResultExt;
 use std::collections::HashMap;
-use tough_schema::decoded::{Decoded, Hex};
-use tough_schema::key::Key;
-use tough_schema::{Role, RoleType, Root, Signature, Signed};
+use tough::schema::decoded::{Decoded, Hex};
+use tough::schema::key::Key;
+use tough::schema::{Role, RoleType, Root, Signature, Signed};

 #[derive(Debug)]
 pub(crate) enum KeyPair {
@@ -49,7 +49,7 @@ impl KeyPair {
     }

     pub(crate) fn public_key(&self) -> Key {
-        use tough_schema::key::{RsaKey, RsaScheme};
+        use tough::schema::key::{RsaKey, RsaScheme};

         match self {
             KeyPair::Rsa(key_pair) => Key::Rsa {
5 changes: 2 additions & 3 deletions workspaces/tuftool/src/root.rs
@@ -10,9 +10,8 @@ use std::collections::HashMap;
 use std::num::NonZeroU64;
 use std::path::PathBuf;
 use structopt::StructOpt;
-use tough_schema::decoded::{Decoded, Hex};
-use tough_schema::key::Key;
-use tough_schema::{RoleKeys, RoleType, Root, Signed};
+use tough::schema::decoded::{Decoded, Hex};
+use tough::schema::{key::Key, RoleKeys, RoleType, Root, Signed};

 #[derive(Debug, StructOpt)]
 pub(crate) enum Command {
2 changes: 1 addition & 1 deletion workspaces/tuftool/src/sign.rs
@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::path::PathBuf;
 use structopt::StructOpt;
-use tough_schema::{RoleType, Root, Signed};
+use tough::schema::{RoleType, Root, Signed};

 #[derive(Debug, StructOpt)]
 pub(crate) struct SignArgs {
2 changes: 1 addition & 1 deletion workspaces/tuftool/src/source.rs
@@ -10,7 +10,7 @@ use crate::key::KeyPair;
 use snafu::{OptionExt, ResultExt};
 use std::path::PathBuf;
 use std::str::FromStr;
-use tough_schema::key::Key;
+use tough::schema::key::Key;
 use url::Url;

 #[derive(Debug)]
8 changes: 6 additions & 2 deletions workspaces/updater/tough/Cargo.toml
@@ -8,12 +8,16 @@ publish = false
 [dependencies]
 chrono = { version = "0.4.6", features = ["serde"] }
 hex = "0.4.0"
+olpc-cjson = { path = "../olpc-cjson" }
+pem = "0.6.0"
 reqwest = { version = "0.9.17", optional = true, default-features = false }
-serde = "1.0.92"
+ring = "0.16.7"
+serde = { version = "1.0.92", features = ["derive"] }
 serde_json = "1.0.39"
+serde_plain = "0.3.0"
 sha2 = "0.8.0"
 snafu = "0.5.0"
-tough_schema = { path = "../tough_schema" }
+untrusted = "0.7.0"
 url = "2.1.0"

 [dev-dependencies]
3 changes: 2 additions & 1 deletion workspaces/updater/tough/src/datastore.rs
@@ -26,8 +26,9 @@ impl<'a> Datastore<'a> {
             File::create(&path).context(error::DatastoreCreate { path: &path })?,
             value,
         )
-        .context(error::JsonSerialization {
+        .context(error::DatastoreSerialize {
             what: format!("{} in datastore", file),
+            path,
         })
     }

23 changes: 12 additions & 11 deletions workspaces/updater/tough/src/error.rs
@@ -2,10 +2,10 @@

 #![allow(clippy::default_trait_access)]

+use crate::schema::RoleType;
 use chrono::{DateTime, Utc};
 use snafu::{Backtrace, Snafu};
 use std::path::PathBuf;
-use tough_schema::RoleType;

 /// Alias for `Result<T, Error>`.
 pub type Result<T> = std::result::Result<T, Error>;
@@ -38,6 +38,15 @@
         backtrace: Backtrace,
     },

+    /// The library failed to serialize an object to JSON to the datastore.
+    #[snafu(display("Failed to serialize {} to JSON at datastore path {}: {}", what, path.display(), source))]
+    DatastoreSerialize {
+        what: String,
+        path: PathBuf,
+        source: serde_json::Error,
+        backtrace: Backtrace,
+    },
+
     /// A metadata file has expired.
     #[snafu(display("{} metadata is expired", role))]
     ExpiredMetadata {
@@ -69,14 +78,6 @@
         backtrace: Backtrace,
     },

-    /// The library failed to serialize an object to JSON.
-    #[snafu(display("Failed to serialize {} to JSON: {}", what, source))]
-    JsonSerialization {
-        what: String,
-        source: serde_json::Error,
-        backtrace: Backtrace,
-    },
-
     /// A file's maximum size exceeded a limit set by the consumer of this library or the metadata.
     #[snafu(display("Maximum size {} (specified by {}) exceeded", max_size, specifier))]
     MaxSizeExceeded {
@@ -171,14 +172,14 @@
     #[snafu(display("Failed to verify {} metadata: {}", role, source))]
     VerifyMetadata {
         role: RoleType,
-        source: tough_schema::Error,
+        source: crate::schema::Error,
         backtrace: Backtrace,
     },

     /// The trusted root metadata file could not be verified.
     #[snafu(display("Failed to verify trusted root metadata: {}", source))]
     VerifyTrustedMetadata {
-        source: tough_schema::Error,
+        source: crate::schema::Error,
         backtrace: Backtrace,
     },

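Because the Error enum and its variants are public, downstream code that matches on tough errors sees the new DatastoreSerialize case and the schema error sources now coming from crate::schema. A short sketch under that assumption; the report function and the Result it receives are hypothetical, not part of this commit:

// Matching the reorganized variants from tough::error.
fn report(result: Result<(), tough::error::Error>) {
    match result {
        Err(tough::error::Error::DatastoreSerialize { what, path, source, .. }) => {
            eprintln!("failed to serialize {} at {}: {}", what, path.display(), source)
        }
        Err(tough::error::Error::VerifyMetadata { role, source, .. }) => {
            eprintln!("{} metadata failed verification: {}", role, source)
        }
        Err(other) => eprintln!("repository error: {}", other),
        Ok(()) => {}
    }
}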
13 changes: 7 additions & 6 deletions workspaces/updater/tough/src/lib.rs
@@ -16,6 +16,7 @@ mod datastore;
 pub mod error;
 mod fetch;
 mod io;
+pub mod schema;
 mod transport;

 #[cfg(feature = "http")]
@@ -25,13 +26,13 @@ pub use crate::transport::{FilesystemTransport, Transport};
 use crate::datastore::Datastore;
 use crate::error::Result;
 use crate::fetch::{fetch_max_size, fetch_sha256};
+use crate::schema::{Role, RoleType, Root, Signed, Snapshot, Timestamp};
 use chrono::{DateTime, Utc};
 use snafu::{ensure, OptionExt, ResultExt};
 use std::borrow::Cow;
 use std::collections::HashMap;
 use std::io::Read;
 use std::path::Path;
-use tough_schema::{Role, RoleType, Root, Signed, Snapshot, Timestamp};
 use url::Url;

 /// Repository fetch settings, provided to [`Repository::load`].
@@ -276,8 +277,8 @@ pub struct Target {
     pub length: u64,
 }

-impl From<tough_schema::Target> for Target {
-    fn from(target: tough_schema::Target) -> Self {
+impl From<crate::schema::Target> for Target {
+    fn from(target: crate::schema::Target) -> Self {
         Self {
             custom: target.custom,
             sha256: target.hashes.sha256.into_vec(),
@@ -698,7 +699,7 @@ fn load_targets<T: Transport>(
     datastore: &Datastore<'_>,
     max_targets_size: u64,
     metadata_base_url: &Url,
-) -> Result<Signed<tough_schema::Targets>> {
+) -> Result<Signed<crate::schema::Targets>> {
     // 4. Download the top-level targets metadata file, up to either the number of bytes specified
     //    in the snapshot metadata file, or some Z number of bytes. The value for Z is set by the
     //    authors of the application using TUF. For example, Z may be tens of kilobytes. If
@@ -745,7 +746,7 @@ fn load_targets<T: Transport>(
             specifier,
         )?)
     };
-    let targets: Signed<tough_schema::Targets> =
+    let targets: Signed<crate::schema::Targets> =
         serde_json::from_reader(reader).context(error::ParseMetadata {
             role: RoleType::Targets,
         })?;
@@ -781,7 +782,7 @@ fn load_targets<T: Transport>(
     //    it, abort the update cycle, and report the potential rollback attack.
     if let Some(Ok(old_targets)) = datastore
         .reader("targets.json")?
-        .map(serde_json::from_reader::<_, Signed<tough_schema::Targets>>)
+        .map(serde_json::from_reader::<_, Signed<crate::schema::Targets>>)
     {
         if root.signed.verify_role(&old_targets).is_ok() {
             ensure!(
(changed file; path not rendered)
@@ -1,13 +1,13 @@
-use crate::decoded::{Decoded, Hex};
-use crate::error;
-use crate::key::Key;
+use crate::schema::decoded::{Decoded, Hex};
+use crate::schema::error;
+use crate::schema::key::Key;
 use serde::{de::Error as _, Deserialize, Deserializer};
 use snafu::ensure;
 use std::collections::HashMap;
 use std::fmt;

 /// Validates the key ID for each key during deserialization and fails if any don't match.
-pub(crate) fn deserialize_keys<'de, D>(
+pub(super) fn deserialize_keys<'de, D>(
     deserializer: D,
 ) -> Result<HashMap<Decoded<Hex>, Key>, D::Error>
 where
@@ -23,15 +23,14 @@ where
         map: &mut HashMap<Decoded<Hex>, Key>,
     ) -> Result<(), error::Error> {
         let calculated = key.key_id()?;
+        let keyid_hex = hex::encode(&keyid);
         ensure!(
             keyid == calculated,
-            error::HashMismatch {
-                context: "key".to_owned(),
+            error::InvalidKeyId {
+                keyid: &keyid_hex,
                 calculated: hex::encode(&calculated),
-                expected: hex::encode(&keyid),
             }
         );
-        let keyid_hex = hex::encode(&keyid); // appease borrowck
         ensure!(
             map.insert(keyid, key).is_none(),
             error::DuplicateKeyId { keyid: keyid_hex }
@@ -65,7 +64,7 @@
 }

 /// Deserializes the `_extra` field on roles, skipping the `_type` tag.
-pub(crate) fn extra_skip_type<'de, D>(
+pub(super) fn extra_skip_type<'de, D>(
     deserializer: D,
 ) -> Result<HashMap<String, serde_json::Value>, D::Error>
 where
@@ -78,12 +77,12 @@ where

 #[cfg(test)]
 mod tests {
-    use crate::{Root, Signed};
+    use crate::schema::{Root, Signed};

     #[test]
     fn duplicate_keyid() {
         assert!(serde_json::from_str::<Signed<Root>>(include_str!(
-            "../tests/data/duplicate-keyid/root.json"
+            "../../tests/data/duplicate-keyid/root.json"
         ))
         .is_err());
     }
(changed file; path not rendered)
@@ -1,5 +1,5 @@
-use crate::error::{self, Error};
-use crate::spki;
+use crate::schema::error::{self, Error};
+use crate::schema::spki;
 use serde::{de::Error as _, Deserialize, Deserializer, Serialize, Serializer};
 use snafu::ResultExt;
 use std::cmp::Ordering;
(changed file; path not rendered)
@@ -2,7 +2,7 @@

 #![allow(clippy::default_trait_access)]

-use crate::RoleType;
+use crate::schema::RoleType;
 use snafu::{Backtrace, Snafu};
 use std::fmt::{self, Debug, Display};

@@ -11,24 +11,18 @@ pub type Result<T> = std::result::Result<T, Error>;

 /// The error type for this library.
 #[derive(Debug, Snafu)]
-#[snafu(visibility = "pub(crate)")]
+#[snafu(visibility = "pub(super)")]
 pub enum Error {
     /// A duplicate key ID was present in the root metadata.
     #[snafu(display("Duplicate key ID: {}", keyid))]
     DuplicateKeyId { keyid: String },

-    /// A downloaded target's checksum does not match the checksum listed in the repository
-    /// metadata.
-    #[snafu(display(
-        "Hash mismatch for {}: calculated {}, expected {}",
-        context,
-        calculated,
-        expected,
-    ))]
-    HashMismatch {
-        context: String,
+    #[snafu(display("Invalid key ID {}: calculated {}", keyid, calculated))]
+    InvalidKeyId {
+        keyid: String,
         calculated: String,
-        expected: String,
         backtrace: Backtrace,
     },

(Remaining file diffs not loaded in this view.)
