From 4951d9c7fd48999d09e0e4af487278e91a997746 Mon Sep 17 00:00:00 2001 From: Nick Macholl Date: Thu, 8 Aug 2024 09:18:00 -0700 Subject: [PATCH 1/5] ADD: Add type stub for pretty_ts_ref in StatMsg --- CHANGELOG.md | 5 +++++ python/python/databento_dbn/_lib.pyi | 12 ++++++++++++ 2 files changed, 17 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 34171d5..f53face 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,10 @@ # Changelog +## 0.20.1 - TBD + +### Bug fixes +- Added missing Python type stub for `pretty_ts_ref` in `StatMsg` + ## 0.20.0 - 2024-07-30 ### Enhancements diff --git a/python/python/databento_dbn/_lib.pyi b/python/python/databento_dbn/_lib.pyi index 4ffdff4..2f34f8e 100644 --- a/python/python/databento_dbn/_lib.pyi +++ b/python/python/databento_dbn/_lib.pyi @@ -4258,6 +4258,18 @@ class StatMsg(Record): """ + @property + def pretty_ts_ref(self) -> dt.datetime: + """ + Reference timestamp expressed as the number of nanoseconds since the + UNIX epoch as a datetime or `pandas.Timestamp`, if available. + + Returns + ------- + datetime.datetime + + """ + @property def ts_ref(self) -> int: """ From 1a522e9f92efd83c2f67e8ac079034178370c005 Mon Sep 17 00:00:00 2001 From: Carter Green Date: Wed, 14 Aug 2024 11:28:41 -0500 Subject: [PATCH 2/5] ADD: Add buffering in async dyn writer --- CHANGELOG.md | 3 ++ rust/dbn/src/encode.rs | 2 +- rust/dbn/src/encode/dyn_writer.rs | 84 ++++++++++++++++++++++++++++++- 3 files changed, 87 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f53face..7ea8e02 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,9 @@ ## 0.20.1 - TBD +### Enhancements +- Added `DynAsyncBufWriter` for buffering compressed or uncompressed async output + ### Bug fixes - Added missing Python type stub for `pretty_ts_ref` in `StatMsg` diff --git a/rust/dbn/src/encode.rs b/rust/dbn/src/encode.rs index b1eb586..ac7e291 100644 --- a/rust/dbn/src/encode.rs +++ b/rust/dbn/src/encode.rs @@ -27,7 +27,7 @@ pub use self::{ AsyncEncoder as AsyncDbnEncoder, AsyncMetadataEncoder as AsyncDbnMetadataEncoder, AsyncRecordEncoder as AsyncDbnRecordEncoder, }, - dyn_writer::DynAsyncWriter, + dyn_writer::{DynAsyncBufWriter, DynAsyncWriter}, json::AsyncEncoder as AsyncJsonEncoder, }; diff --git a/rust/dbn/src/encode/dyn_writer.rs b/rust/dbn/src/encode/dyn_writer.rs index 8f25075..d038ffa 100644 --- a/rust/dbn/src/encode/dyn_writer.rs +++ b/rust/dbn/src/encode/dyn_writer.rs @@ -81,6 +81,8 @@ where } } +#[cfg(feature = "async")] +pub use r#async::DynBufWriter as DynAsyncBufWriter; #[cfg(feature = "async")] pub use r#async::DynWriter as DynAsyncWriter; @@ -92,11 +94,91 @@ mod r#async { }; use async_compression::tokio::write::ZstdEncoder; - use tokio::io; + use tokio::io::{self, BufWriter}; use crate::{encode::async_zstd_encoder, enums::Compression}; + /// An object that allows for abstracting over compressed and uncompressed output + /// with buffering. + pub struct DynBufWriter(DynBufWriterImpl) + where + W: io::AsyncWriteExt + Unpin, + B: io::AsyncWriteExt + Unpin; + + enum DynBufWriterImpl + where + W: io::AsyncWriteExt + Unpin, + B: io::AsyncWriteExt + Unpin, + { + Uncompressed(B), + ZStd(ZstdEncoder), + } + + impl DynBufWriter + where + W: io::AsyncWriteExt + Unpin, + { + /// Creates a new instance of [`DynWriter`] which will wrap `writer` with + /// `compression`. 
+ pub fn new(writer: W, compression: Compression) -> Self { + Self(match compression { + Compression::None => DynBufWriterImpl::Uncompressed(writer), + Compression::ZStd => DynBufWriterImpl::ZStd(async_zstd_encoder(writer)), + }) + } + } + + impl DynBufWriter> + where + W: io::AsyncWriteExt + Unpin, + { + /// Creates a new instance of [`DynWriter`], wrapping `writer` in a `BufWriter`. + pub fn new_buffered(writer: W, compression: Compression) -> Self { + Self(match compression { + Compression::None => DynBufWriterImpl::Uncompressed(BufWriter::new(writer)), + // `ZstdEncoder` already wraps `W` in a `BufWriter`, cf. + // https://github.com/Nullus157/async-compression/blob/main/src/tokio/write/generic/encoder.rs + Compression::ZStd => DynBufWriterImpl::ZStd(async_zstd_encoder(writer)), + }) + } + } + + impl io::AsyncWrite for DynBufWriter + where + W: io::AsyncWrite + io::AsyncWriteExt + Unpin, + { + fn poll_write( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &[u8], + ) -> Poll> { + match &mut self.0 { + DynBufWriterImpl::Uncompressed(w) => { + io::AsyncWrite::poll_write(Pin::new(w), cx, buf) + } + DynBufWriterImpl::ZStd(enc) => io::AsyncWrite::poll_write(Pin::new(enc), cx, buf), + } + } + + fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + match &mut self.0 { + DynBufWriterImpl::Uncompressed(w) => io::AsyncWrite::poll_flush(Pin::new(w), cx), + DynBufWriterImpl::ZStd(enc) => io::AsyncWrite::poll_flush(Pin::new(enc), cx), + } + } + + fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + match &mut self.0 { + DynBufWriterImpl::Uncompressed(w) => io::AsyncWrite::poll_shutdown(Pin::new(w), cx), + DynBufWriterImpl::ZStd(enc) => io::AsyncWrite::poll_shutdown(Pin::new(enc), cx), + } + } + } + /// An object that allows for abstracting over compressed and uncompressed output. + /// + /// Compared with [`DynBufWriter`], only the compressed output is buffered, as it is + /// required by the async Zstd implementation. pub struct DynWriter(DynWriterImpl) where W: io::AsyncWriteExt + Unpin; From 0490b4b75883ae5f15e18d12fe8d1633be3ad0a3 Mon Sep 17 00:00:00 2001 From: Zach Banks Date: Mon, 12 Aug 2024 14:57:51 -0400 Subject: [PATCH 3/5] ADD: Add XCIS.BBOTRADES and XNYS.BBOTRADES enums --- CHANGELOG.md | 1 + rust/dbn/src/publishers.rs | 28 +++++++++++++++++++++++++--- 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ea8e02..f1fcec8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ ### Enhancements - Added `DynAsyncBufWriter` for buffering compressed or uncompressed async output +- Added new publisher values for `XCIS.BBOTRADES` and `XNYS.BBOTRADES` ### Bug fixes - Added missing Python type stub for `pretty_ts_ref` in `StatMsg` diff --git a/rust/dbn/src/publishers.rs b/rust/dbn/src/publishers.rs index 21771ce..5b06d55 100644 --- a/rust/dbn/src/publishers.rs +++ b/rust/dbn/src/publishers.rs @@ -285,14 +285,18 @@ pub enum Dataset { NdexImpact = 29, /// Databento Equities Max DbeqMax = 30, - /// Nasdaq Basic (NLS+QBBO) + /// Nasdaq Basic (NLS and QBBO) XnasBasic = 31, /// Databento Equities Summary DbeqSummary = 32, + /// NYSE National BBO and Trades + XcisBbotrades = 33, + /// NYSE BBO and Trades + XnysBbotrades = 34, } /// The number of Dataset variants. -pub const DATASET_COUNT: usize = 32; +pub const DATASET_COUNT: usize = 34; impl Dataset { /// Convert a Dataset to its `str` representation. 
@@ -332,6 +336,8 @@ impl Dataset { Self::DbeqMax => "DBEQ.MAX", Self::XnasBasic => "XNAS.BASIC", Self::DbeqSummary => "DBEQ.SUMMARY", + Self::XcisBbotrades => "XCIS.BBOTRADES", + Self::XnysBbotrades => "XNYS.BBOTRADES", } } } @@ -387,6 +393,8 @@ impl std::str::FromStr for Dataset { "DBEQ.MAX" => Ok(Self::DbeqMax), "XNAS.BASIC" => Ok(Self::XnasBasic), "DBEQ.SUMMARY" => Ok(Self::DbeqSummary), + "XCIS.BBOTRADES" => Ok(Self::XcisBbotrades), + "XNYS.BBOTRADES" => Ok(Self::XnysBbotrades), _ => Err(Error::conversion::(s)), } } @@ -577,10 +585,14 @@ pub enum Publisher { XnasBasicXpsx = 89, /// Databento Equities Summary DbeqSummaryDbeq = 90, + /// NYSE National BBO and Trades + XcisBbotradesXcis = 91, + /// NYSE BBO and Trades + XnysBbotradesXnys = 92, } /// The number of Publisher variants. -pub const PUBLISHER_COUNT: usize = 90; +pub const PUBLISHER_COUNT: usize = 92; impl Publisher { /// Convert a Publisher to its `str` representation. @@ -676,6 +688,8 @@ impl Publisher { Self::XnasBasicXbos => "XNAS.BASIC.XBOS", Self::XnasBasicXpsx => "XNAS.BASIC.XPSX", Self::DbeqSummaryDbeq => "DBEQ.SUMMARY.DBEQ", + Self::XcisBbotradesXcis => "XCIS.BBOTRADES.XCIS", + Self::XnysBbotradesXnys => "XNYS.BBOTRADES.XNYS", } } @@ -772,6 +786,8 @@ impl Publisher { Self::XnasBasicXbos => Venue::Xbos, Self::XnasBasicXpsx => Venue::Xpsx, Self::DbeqSummaryDbeq => Venue::Dbeq, + Self::XcisBbotradesXcis => Venue::Xcis, + Self::XnysBbotradesXnys => Venue::Xnys, } } @@ -868,6 +884,8 @@ impl Publisher { Self::XnasBasicXbos => Dataset::XnasBasic, Self::XnasBasicXpsx => Dataset::XnasBasic, Self::DbeqSummaryDbeq => Dataset::DbeqSummary, + Self::XcisBbotradesXcis => Dataset::XcisBbotrades, + Self::XnysBbotradesXnys => Dataset::XnysBbotrades, } } @@ -966,6 +984,8 @@ impl Publisher { (Dataset::XnasBasic, Venue::Xbos) => Ok(Self::XnasBasicXbos), (Dataset::XnasBasic, Venue::Xpsx) => Ok(Self::XnasBasicXpsx), (Dataset::DbeqSummary, Venue::Dbeq) => Ok(Self::DbeqSummaryDbeq), + (Dataset::XcisBbotrades, Venue::Xcis) => Ok(Self::XcisBbotradesXcis), + (Dataset::XnysBbotrades, Venue::Xnys) => Ok(Self::XnysBbotradesXnys), _ => Err(Error::conversion::(format!("({dataset}, {venue})"))), } } @@ -1078,6 +1098,8 @@ impl std::str::FromStr for Publisher { "XNAS.BASIC.XBOS" => Ok(Self::XnasBasicXbos), "XNAS.BASIC.XPSX" => Ok(Self::XnasBasicXpsx), "DBEQ.SUMMARY.DBEQ" => Ok(Self::DbeqSummaryDbeq), + "XCIS.BBOTRADES.XCIS" => Ok(Self::XcisBbotradesXcis), + "XNYS.BBOTRADES.XNYS" => Ok(Self::XnysBbotradesXnys), _ => Err(Error::conversion::(s)), } } From 4f661406ca13848999da9a352396f5ba9a7c333a Mon Sep 17 00:00:00 2001 From: Carter Green Date: Fri, 16 Aug 2024 11:24:25 -0500 Subject: [PATCH 4/5] MOD: Change casing of enums --- rust/dbn/src/encode/dyn_writer.rs | 40 +++++++++++++++---------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/rust/dbn/src/encode/dyn_writer.rs b/rust/dbn/src/encode/dyn_writer.rs index d038ffa..8a30590 100644 --- a/rust/dbn/src/encode/dyn_writer.rs +++ b/rust/dbn/src/encode/dyn_writer.rs @@ -14,7 +14,7 @@ where W: io::Write, { Uncompressed(W), - ZStd(zstd::stream::AutoFinishEncoder<'a, W>), + Zstd(zstd::stream::AutoFinishEncoder<'a, W>), } impl<'a, W> DynWriter<'a, W> @@ -28,7 +28,7 @@ where pub fn new(writer: W, compression: Compression) -> Result { match compression { Compression::None => Ok(Self(DynWriterImpl::Uncompressed(writer))), - Compression::ZStd => zstd_encoder(writer).map(|enc| Self(DynWriterImpl::ZStd(enc))), + Compression::ZStd => zstd_encoder(writer).map(|enc| Self(DynWriterImpl::Zstd(enc))), } 
} @@ -36,7 +36,7 @@ where pub fn get_mut(&mut self) -> &mut W { match &mut self.0 { DynWriterImpl::Uncompressed(w) => w, - DynWriterImpl::ZStd(enc) => enc.get_mut(), + DynWriterImpl::Zstd(enc) => enc.get_mut(), } } } @@ -48,35 +48,35 @@ where fn write(&mut self, buf: &[u8]) -> io::Result { match &mut self.0 { DynWriterImpl::Uncompressed(writer) => writer.write(buf), - DynWriterImpl::ZStd(writer) => writer.write(buf), + DynWriterImpl::Zstd(writer) => writer.write(buf), } } fn flush(&mut self) -> io::Result<()> { match &mut self.0 { DynWriterImpl::Uncompressed(writer) => writer.flush(), - DynWriterImpl::ZStd(writer) => writer.flush(), + DynWriterImpl::Zstd(writer) => writer.flush(), } } fn write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result { match &mut self.0 { DynWriterImpl::Uncompressed(writer) => writer.write_vectored(bufs), - DynWriterImpl::ZStd(writer) => writer.write_vectored(bufs), + DynWriterImpl::Zstd(writer) => writer.write_vectored(bufs), } } fn write_all(&mut self, buf: &[u8]) -> io::Result<()> { match &mut self.0 { DynWriterImpl::Uncompressed(writer) => writer.write_all(buf), - DynWriterImpl::ZStd(writer) => writer.write_all(buf), + DynWriterImpl::Zstd(writer) => writer.write_all(buf), } } fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> io::Result<()> { match &mut self.0 { DynWriterImpl::Uncompressed(writer) => writer.write_fmt(fmt), - DynWriterImpl::ZStd(writer) => writer.write_fmt(fmt), + DynWriterImpl::Zstd(writer) => writer.write_fmt(fmt), } } } @@ -111,7 +111,7 @@ mod r#async { B: io::AsyncWriteExt + Unpin, { Uncompressed(B), - ZStd(ZstdEncoder), + Zstd(ZstdEncoder), } impl DynBufWriter @@ -123,7 +123,7 @@ mod r#async { pub fn new(writer: W, compression: Compression) -> Self { Self(match compression { Compression::None => DynBufWriterImpl::Uncompressed(writer), - Compression::ZStd => DynBufWriterImpl::ZStd(async_zstd_encoder(writer)), + Compression::ZStd => DynBufWriterImpl::Zstd(async_zstd_encoder(writer)), }) } } @@ -138,7 +138,7 @@ mod r#async { Compression::None => DynBufWriterImpl::Uncompressed(BufWriter::new(writer)), // `ZstdEncoder` already wraps `W` in a `BufWriter`, cf. 
// https://github.com/Nullus157/async-compression/blob/main/src/tokio/write/generic/encoder.rs - Compression::ZStd => DynBufWriterImpl::ZStd(async_zstd_encoder(writer)), + Compression::ZStd => DynBufWriterImpl::Zstd(async_zstd_encoder(writer)), }) } } @@ -156,21 +156,21 @@ mod r#async { DynBufWriterImpl::Uncompressed(w) => { io::AsyncWrite::poll_write(Pin::new(w), cx, buf) } - DynBufWriterImpl::ZStd(enc) => io::AsyncWrite::poll_write(Pin::new(enc), cx, buf), + DynBufWriterImpl::Zstd(enc) => io::AsyncWrite::poll_write(Pin::new(enc), cx, buf), } } fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { match &mut self.0 { DynBufWriterImpl::Uncompressed(w) => io::AsyncWrite::poll_flush(Pin::new(w), cx), - DynBufWriterImpl::ZStd(enc) => io::AsyncWrite::poll_flush(Pin::new(enc), cx), + DynBufWriterImpl::Zstd(enc) => io::AsyncWrite::poll_flush(Pin::new(enc), cx), } } fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { match &mut self.0 { DynBufWriterImpl::Uncompressed(w) => io::AsyncWrite::poll_shutdown(Pin::new(w), cx), - DynBufWriterImpl::ZStd(enc) => io::AsyncWrite::poll_shutdown(Pin::new(enc), cx), + DynBufWriterImpl::Zstd(enc) => io::AsyncWrite::poll_shutdown(Pin::new(enc), cx), } } } @@ -188,7 +188,7 @@ mod r#async { W: io::AsyncWriteExt + Unpin, { Uncompressed(W), - ZStd(ZstdEncoder), + Zstd(ZstdEncoder), } impl DynWriter @@ -200,7 +200,7 @@ mod r#async { pub fn new(writer: W, compression: Compression) -> Self { Self(match compression { Compression::None => DynWriterImpl::Uncompressed(writer), - Compression::ZStd => DynWriterImpl::ZStd(async_zstd_encoder(writer)), + Compression::ZStd => DynWriterImpl::Zstd(async_zstd_encoder(writer)), }) } @@ -208,7 +208,7 @@ mod r#async { pub fn get_mut(&mut self) -> &mut W { match &mut self.0 { DynWriterImpl::Uncompressed(w) => w, - DynWriterImpl::ZStd(enc) => enc.get_mut(), + DynWriterImpl::Zstd(enc) => enc.get_mut(), } } } @@ -224,21 +224,21 @@ mod r#async { ) -> Poll> { match &mut self.0 { DynWriterImpl::Uncompressed(w) => io::AsyncWrite::poll_write(Pin::new(w), cx, buf), - DynWriterImpl::ZStd(enc) => io::AsyncWrite::poll_write(Pin::new(enc), cx, buf), + DynWriterImpl::Zstd(enc) => io::AsyncWrite::poll_write(Pin::new(enc), cx, buf), } } fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { match &mut self.0 { DynWriterImpl::Uncompressed(w) => io::AsyncWrite::poll_flush(Pin::new(w), cx), - DynWriterImpl::ZStd(enc) => io::AsyncWrite::poll_flush(Pin::new(enc), cx), + DynWriterImpl::Zstd(enc) => io::AsyncWrite::poll_flush(Pin::new(enc), cx), } } fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { match &mut self.0 { DynWriterImpl::Uncompressed(w) => io::AsyncWrite::poll_shutdown(Pin::new(w), cx), - DynWriterImpl::ZStd(enc) => io::AsyncWrite::poll_shutdown(Pin::new(enc), cx), + DynWriterImpl::Zstd(enc) => io::AsyncWrite::poll_shutdown(Pin::new(enc), cx), } } } From 37e9534bae0d26038d99ae5e4621a39c6d0fca45 Mon Sep 17 00:00:00 2001 From: Carter Green Date: Mon, 26 Aug 2024 17:56:28 -0500 Subject: [PATCH 5/5] VER: Release 0.20.1 --- CHANGELOG.md | 2 +- Cargo.lock | 10 +++++----- Cargo.toml | 2 +- python/pyproject.toml | 4 ++-- rust/dbn-cli/Cargo.toml | 2 +- rust/dbn/Cargo.toml | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f1fcec8..1760124 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## 0.20.1 - TBD +## 0.20.1 - 2024-08-26 ### Enhancements - Added `DynAsyncBufWriter` for buffering 
compressed or uncompressed async output diff --git a/Cargo.lock b/Cargo.lock index 12b00fe..d1e1b5f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -256,7 +256,7 @@ dependencies = [ [[package]] name = "databento-dbn" -version = "0.20.0" +version = "0.20.1" dependencies = [ "dbn", "pyo3", @@ -267,7 +267,7 @@ dependencies = [ [[package]] name = "dbn" -version = "0.20.0" +version = "0.20.1" dependencies = [ "async-compression", "csv", @@ -289,7 +289,7 @@ dependencies = [ [[package]] name = "dbn-c" -version = "0.20.0" +version = "0.20.1" dependencies = [ "anyhow", "cbindgen", @@ -299,7 +299,7 @@ dependencies = [ [[package]] name = "dbn-cli" -version = "0.20.0" +version = "0.20.1" dependencies = [ "anyhow", "assert_cmd", @@ -314,7 +314,7 @@ dependencies = [ [[package]] name = "dbn-macros" -version = "0.20.0" +version = "0.20.1" dependencies = [ "csv", "dbn", diff --git a/Cargo.toml b/Cargo.toml index d19d16f..f0b10f2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,7 +11,7 @@ resolver = "2" [workspace.package] authors = ["Databento "] edition = "2021" -version = "0.20.0" +version = "0.20.1" documentation = "https://docs.databento.com" repository = "https://github.com/databento/dbn" license = "Apache-2.0" diff --git a/python/pyproject.toml b/python/pyproject.toml index ac968e2..fd43fbf 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "databento-dbn" -version = "0.20.0" +version = "0.20.1" description = "Python bindings for encoding and decoding Databento Binary Encoding (DBN)" authors = ["Databento "] license = "Apache-2.0" @@ -17,7 +17,7 @@ build-backend = "maturin" [project] name = "databento-dbn" -version = "0.20.0" +version = "0.20.1" authors = [ { name = "Databento", email = "support@databento.com" } ] diff --git a/rust/dbn-cli/Cargo.toml b/rust/dbn-cli/Cargo.toml index c686f23..b611658 100644 --- a/rust/dbn-cli/Cargo.toml +++ b/rust/dbn-cli/Cargo.toml @@ -16,7 +16,7 @@ name = "dbn" path = "src/main.rs" [dependencies] -dbn = { path = "../dbn", version = "=0.20.0", default-features = false } +dbn = { path = "../dbn", version = "=0.20.1", default-features = false } anyhow = { workspace = true } clap = { version = "4.5", features = ["derive", "wrap_help"] } diff --git a/rust/dbn/Cargo.toml b/rust/dbn/Cargo.toml index 84d5d49..48bc1e4 100644 --- a/rust/dbn/Cargo.toml +++ b/rust/dbn/Cargo.toml @@ -25,7 +25,7 @@ serde = ["dep:serde", "time/parsing", "time/serde"] trivial_copy = [] [dependencies] -dbn-macros = { version = "=0.20.0", path = "../dbn-macros" } +dbn-macros = { version = "=0.20.1", path = "../dbn-macros" } async-compression = { version = "0.4.11", features = ["tokio", "zstd"], optional = true } csv = { workspace = true }
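
Usage note (editorial, not part of the patches above): a minimal sketch of how the new `DynAsyncBufWriter` added in PATCH 2/5 might be used, assuming the `dbn` crate is built with its "async" feature and tokio with its "fs" and "macros" features; the output path and payload bytes are invented for illustration.

use dbn::{encode::DynAsyncBufWriter, enums::Compression};
use tokio::io::AsyncWriteExt;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Any `AsyncWrite` target works; a file is used here for illustration.
    let file = tokio::fs::File::create("out.dbn.zst").await?;
    // `new_buffered` wraps uncompressed output in a `tokio::io::BufWriter`;
    // for `Compression::ZStd` the async zstd encoder's internal buffering is
    // reused, so the writer is not double-wrapped.
    let mut writer = DynAsyncBufWriter::new_buffered(file, Compression::ZStd);
    writer.write_all(b"example payload").await?;
    // `shutdown` flushes buffered bytes and finishes the zstd frame.
    writer.shutdown().await?;
    Ok(())
}

In practice this writer would typically be handed to one of the async DBN encoders rather than written to directly; the raw `write_all` call above is only meant to show the `AsyncWrite` interface the new type exposes.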