Merge pull request #151 from rakaly/token-write
Add binary::Token::write for outputting binary data
nickbabcock authored Dec 28, 2023
2 parents 966fd53 + d33804c commit c059ee3
Showing 2 changed files with 168 additions and 22 deletions.
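For a sense of how the pieces below compose, here is a minimal end-to-end sketch, assuming the jomini crate at this commit: Token::write comes from the lexer.rs change, TokenReader::from_slice and read from the reader.rs tests, and the expected bytes are the ones asserted in the doctest below.

use jomini::binary::{Token, TokenReader};

fn main() -> Result<(), std::io::Error> {
    let tokens = [Token::Id(0x00e1), Token::Equal, Token::U32(10)];

    // Serialize each token back into the little-endian wire format.
    let mut cursor = std::io::Cursor::new(Vec::new());
    for token in &tokens {
        token.write(&mut cursor)?;
    }
    let bytes = cursor.into_inner();

    // The same bytes the doctest below asserts for this token stream.
    assert_eq!(bytes, [0xe1, 0x00, 0x01, 0x00, 0x14, 0x00, 0x0a, 0x00, 0x00, 0x00]);

    // Lex the bytes again and confirm the original tokens come back,
    // mirroring the round-trip test this commit adds.
    let mut reader = TokenReader::from_slice(bytes.as_slice());
    for token in &tokens {
        assert_eq!(*token, reader.read().unwrap());
    }
    Ok(())
}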
75 changes: 75 additions & 0 deletions src/binary/lexer.rs
@@ -219,6 +219,81 @@ pub enum Token<'a> {
    Id(u16),
}

impl<'a> Token<'a> {
    fn write_u32(mut wtr: impl std::io::Write, num: u32) -> Result<(), std::io::Error> {
        wtr.write_all(&LexemeId::U32.0.to_le_bytes())?;
        wtr.write_all(&num.to_le_bytes())
    }

    /// Write the binary representation of a token to a writer
    ///
    /// ```rust
    /// use jomini::binary::Token;
    /// let out = Vec::new();
    /// let mut cursor = std::io::Cursor::new(out);
    /// for token in &[Token::Id(0x00e1), Token::Equal, Token::U32(10)] {
    ///     token.write(&mut cursor)?;
    /// }
    ///
    /// assert_eq!(&cursor.into_inner(), &[0xe1, 0x00, 0x01, 0x00, 0x14, 0x00, 0x0a, 0x00, 0x00, 0x00]);
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn write(&self, mut wtr: impl std::io::Write) -> Result<(), std::io::Error> {
        match self {
            Token::Open => wtr.write_all(&LexemeId::OPEN.0.to_le_bytes()),
            Token::Close => wtr.write_all(&LexemeId::CLOSE.0.to_le_bytes()),
            Token::Equal => wtr.write_all(&LexemeId::EQUAL.0.to_le_bytes()),
            Token::U32(x) => Token::write_u32(wtr, *x),
            Token::U64(x) => {
                wtr.write_all(&LexemeId::U64.0.to_le_bytes())?;
                wtr.write_all(&x.to_le_bytes())
            }
            Token::I32(x) => {
                wtr.write_all(&LexemeId::I32.0.to_le_bytes())?;
                wtr.write_all(&x.to_le_bytes())
            }
            Token::Bool(x) => {
                wtr.write_all(&LexemeId::BOOL.0.to_le_bytes())?;
                wtr.write_all(&[if *x { 1u8 } else { 0 }])
            }
            Token::Quoted(x) => {
                wtr.write_all(&LexemeId::QUOTED.0.to_le_bytes())?;
                wtr.write_all(&(x.as_bytes().len() as u16).to_le_bytes())?;
                wtr.write_all(x.as_bytes())
            }
            Token::Unquoted(x) => {
                wtr.write_all(&LexemeId::UNQUOTED.0.to_le_bytes())?;
                wtr.write_all(&(x.as_bytes().len() as u16).to_le_bytes())?;
                wtr.write_all(x.as_bytes())
            }
            Token::F32(x) => {
                wtr.write_all(&LexemeId::F32.0.to_le_bytes())?;
                wtr.write_all(x)
            }
            Token::F64(x) => {
                wtr.write_all(&LexemeId::F64.0.to_le_bytes())?;
                wtr.write_all(x)
            }
            Token::Rgb(x) => {
                wtr.write_all(&LexemeId::RGB.0.to_le_bytes())?;
                wtr.write_all(&LexemeId::OPEN.0.to_le_bytes())?;
                Token::write_u32(&mut wtr, x.r)?;
                Token::write_u32(&mut wtr, x.g)?;
                Token::write_u32(&mut wtr, x.b)?;
                if let Some(a) = x.a.as_ref() {
                    Token::write_u32(&mut wtr, *a)?;
                }
                wtr.write_all(&LexemeId::CLOSE.0.to_le_bytes())
            }
            Token::I64(x) => {
                wtr.write_all(&LexemeId::I64.0.to_le_bytes())?;
                wtr.write_all(&x.to_le_bytes())
            }
            Token::Id(x) => wtr.write_all(&x.to_le_bytes()),
        }
    }
}

#[inline]
pub(crate) fn read_token(data: &[u8]) -> Result<(Token, &[u8]), LexError> {
    let (id, data) = read_id(data)?;
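A detail worth calling out in the Rgb arm above: a color is not written as a single payload but as a brace-delimited container of u32 tokens, each carrying its own marker. The standalone sketch below mirrors that framing; rgb_id is a hypothetical parameter because the numeric value of LexemeId::RGB does not appear in this diff, while the other ids do (OPEN = 0x0003 and CLOSE = 0x0004 from the test bytes in reader.rs, U32 = 0x0014 from the doctest).

// Hypothetical standalone encoder mirroring the Rgb arm of Token::write;
// `rgb_id` is a parameter because LexemeId::RGB's value is not shown here.
fn encode_rgb(rgb_id: u16, r: u32, g: u32, b: u32, a: Option<u32>) -> Vec<u8> {
    const OPEN: u16 = 0x0003; // from the reader.rs test bytes
    const CLOSE: u16 = 0x0004;
    const U32: u16 = 0x0014; // from the doctest above

    let mut out = Vec::new();
    out.extend_from_slice(&rgb_id.to_le_bytes());
    out.extend_from_slice(&OPEN.to_le_bytes());
    // Each channel is framed as a full u32 token: marker, then payload.
    for channel in [Some(r), Some(g), Some(b), a].into_iter().flatten() {
        out.extend_from_slice(&U32.to_le_bytes());
        out.extend_from_slice(&channel.to_le_bytes());
    }
    out.extend_from_slice(&CLOSE.to_le_bytes());
    out
}

fn main() {
    // 2 + 2 + 3 * (2 + 4) + 2 = 24 bytes for a color without alpha.
    assert_eq!(encode_rgb(0xffff, 110, 28, 27, None).len(), 24);
}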
115 changes: 93 additions & 22 deletions src/binary/reader.rs
@@ -391,33 +391,104 @@ impl From<BufferError> for ReaderErrorKind {
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{binary::Rgb, Scalar};
    use rstest::*;

    #[rstest]
    #[case(&[
        Token::Id(0x2838),
        Token::Equal,
        Token::Open,
        Token::Id(0x2863),
        Token::Equal,
        Token::Unquoted(Scalar::new(b"western")),
        Token::Quoted(Scalar::new(b"1446.5.31")),
        Token::Equal,
        Token::Id(0x2838),
        Token::Close,
    ])]
    #[case(&[
        Token::Id(0x2ec9),
        Token::Equal,
        Token::Open,
        Token::Id(0x28e2),
        Token::Equal,
        Token::I32(1),
        Token::Id(0x28e3),
        Token::Equal,
        Token::I32(11),
        Token::Id(0x2ec7),
        Token::Equal,
        Token::I32(4),
        Token::Id(0x2ec8),
        Token::Equal,
        Token::I32(0),
        Token::Close,
    ])]
    #[case(&[
        Token::Id(0x053a),
        Token::Equal,
        Token::Rgb(Rgb {
            r: 110,
            g: 28,
            b: 27,
            a: None
        })
    ])]
    #[case(&[
        Token::Id(0x053a),
        Token::Equal,
        Token::Rgb(Rgb {
            r: 110,
            g: 28,
            b: 27,
            a: Some(128),
        })
    ])]
    #[case(&[
        Token::Id(0x326b), Token::Equal, Token::U64(128),
        Token::Id(0x326b), Token::Equal, Token::I64(-1),
        Token::Id(0x2d82), Token::Equal, Token::F64([0xc7, 0xe4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
        Token::Id(0x2d82), Token::Equal, Token::F32([0x8f, 0xc2, 0x75, 0x3e]),
        Token::Id(0x2d82), Token::Equal, Token::U32(89)
    ])]
    fn test_roundtrip(#[case] input: &[Token]) {
        let data = Vec::new();
        let mut writer = std::io::Cursor::new(data);
        for tok in input {
            tok.write(&mut writer).unwrap();
        }

        let data = writer.into_inner();

        // `Read`
        let mut reader = TokenReader::new(data.as_slice());
        for (i, e) in input.iter().enumerate() {
            assert_eq!(*e, reader.read().unwrap(), "failure at token idx: {}", i);
        }

        reader.read().unwrap_err();
        assert_eq!(reader.position(), data.len());

        // `from_slice`
        let mut reader = TokenReader::from_slice(data.as_slice());
        for (i, e) in input.iter().enumerate() {
            assert_eq!(*e, reader.read().unwrap(), "failure at token idx: {}", i);
        }

        reader.read().unwrap_err();
        assert_eq!(reader.position(), data.len());

        // reader buffer size
        for i in 30..40 {
            let mut reader = TokenReader::builder().buffer_len(i).build(data.as_slice());
            for e in input {
                assert_eq!(*e, reader.read().unwrap(), "failure at token idx: {}", i);
            }

            reader.read().unwrap_err();
            assert_eq!(reader.position(), data.len());
        }
    }

    Removed by this commit:
-    fn test_reader(data: &[u8], expected: &[Token]) {
-        fn eq<R>(mut reader: TokenReader<R>, expected: &[Token])
-        where
-            R: Read,
-        {
-            for token in expected {
-                assert_eq!(reader.next().unwrap(), Some(*token));
-            }
-            assert_eq!(reader.next().unwrap(), None);
-        }
-
-        eq(TokenReader::new(data), expected);
-
-        let data_with_header: Vec<_> = b"EU4bin".iter().chain(data).copied().collect();
-        let mut reader = TokenReader::new(data_with_header.as_slice());
-        assert_eq!(reader.read_bytes(6).unwrap(), &b"EU4bin"[..]);
-        eq(reader, expected);
-    }
-
-    #[test]
-    fn test_binary_token_reader() {
-        let data = [0xe1, 0x00, 0x01, 0x00, 0x03, 0x00, 0x04, 0x00];
-        test_reader(
-            &data,
-            &[Token::Id(0x00e1), Token::Equal, Token::Open, Token::Close],
-        );
-    }

    #[test]
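One note on the buffer-size sweep at the end of the round-trip test: lengths 30 through 40 are small enough that token boundaries land across internal buffer refills, so the sweep exercises the reader's resumption logic rather than any single happy path. A minimal sketch of that builder API follows, assuming TokenReader::next still behaves as in the removed tests (Ok(Some(token)) per token, Ok(None) at end of input) and that the reader's error type converts into Box<dyn std::error::Error>.

use jomini::binary::TokenReader;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Any std::io::Read source works; a byte slice keeps the example small.
    let data: &[u8] = &[0xe1, 0x00, 0x01, 0x00, 0x14, 0x00, 0x0a, 0x00, 0x00, 0x00];

    // Configure a small internal buffer, mirroring the builder usage in the
    // test above (which sweeps lengths 30..40 to force refills mid-stream).
    let mut reader = TokenReader::builder().buffer_len(32).build(data);
    while let Some(token) = reader.next()? {
        println!("{:?}", token);
    }
    assert_eq!(reader.position(), data.len());
    Ok(())
}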
