6,490 changes: 3,539 additions & 2,951 deletions Cargo.lock

Large diffs are not rendered by default.

18 changes: 11 additions & 7 deletions Cargo.toml
@@ -131,7 +131,7 @@ aws-sdk-kinesis = { version = "1", default-features = false, features = [
"rt-tokio",
"rustls",
] }
aws-sdk-s3 = { version = "1", default-features = false, features = [
aws-sdk-s3 = { version = "1.17", default-features = false, features = [
"rt-tokio",
"rustls",
] }
@@ -172,14 +172,14 @@ hashbrown0_15 = { package = "hashbrown", version = "0.15", features = [
] }
hytra = "0.1"
# branch dev_rebase_main_20250325
iceberg = { git = "https://github.com/risingwavelabs/iceberg-rust.git", rev = "1d79e119ffb30bda0fafcb3114a17c77f46dc6fd", features = [
iceberg = { git = "https://github.com/risingwavelabs/iceberg-rust.git", rev = "5cbc7f806afae67db489ae2dcc856e3271b40e54", features = [
"storage-s3",
"storage-gcs",
"storage-azblob",
"storage-azdls",
] }
iceberg-catalog-glue = { git = "https://github.com/risingwavelabs/iceberg-rust.git", rev = "1d79e119ffb30bda0fafcb3114a17c77f46dc6fd" }
iceberg-catalog-rest = { git = "https://github.com/risingwavelabs/iceberg-rust.git", rev = "1d79e119ffb30bda0fafcb3114a17c77f46dc6fd" }
iceberg-catalog-glue = { git = "https://github.com/risingwavelabs/iceberg-rust.git", rev = "5cbc7f806afae67db489ae2dcc856e3271b40e54" }
iceberg-catalog-rest = { git = "https://github.com/risingwavelabs/iceberg-rust.git", rev = "5cbc7f806afae67db489ae2dcc856e3271b40e54" }

indexmap = { version = "2.12.0", features = ["serde"] }
itertools = "0.14.0"
@@ -201,7 +201,7 @@ parking_lot = { version = "0.12", features = [
"arc_lock",
"deadlock_detection",
] }
parquet = { version = "54", features = ["async"] }
parquet = { version = "56", features = ["async"] }
pin-project-lite = "0.2"
prost = { version = "0.13" }
prost-build = { version = "0.13" }
@@ -261,12 +261,16 @@ risingwave_variables = { path = "./src/utils/variables" }
rw_futures_util = { path = "src/utils/futures_util" }
rw_iter_util = { path = "src/utils/iter_util" }
rw_resource_util = { path = "src/utils/resource_util" }
sea-orm = { version = "~1.1", features = [
sea-orm = { version = "1.1.19", features = [
"sqlx-all",
"runtime-tokio-native-tls",
"with-uuid",
] }
sea-orm-migration = "~1.1"
sea-orm-migration = { version = "1.1.19", features = [
"sqlx-postgres",
"sqlx-mysql",
"sqlx-sqlite",
] }
serde = { version = "1", features = ["derive"] }
smallvec = "1.15"
sqlx = { version = "0.8.2", default-features = false, features = [
4 changes: 4 additions & 0 deletions src/common/Cargo.toml
@@ -19,6 +19,10 @@ arrow-55-array = { package = "arrow-array", version = "55" }
arrow-55-buffer = { package = "arrow-buffer", version = "55" }
arrow-55-cast = { package = "arrow-cast", version = "55" }
arrow-55-schema = { package = "arrow-schema", version = "55" }
arrow-56-array = { package = "arrow-array", version = "56" }
arrow-56-buffer = { package = "arrow-buffer", version = "56" }
arrow-56-cast = { package = "arrow-cast", version = "56" }
arrow-56-schema = { package = "arrow-schema", version = "56" }
async-trait = "0.1"
auto_enums = { workspace = true }
auto_impl = "1"
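
The new arrow-56-* entries mirror the existing arrow-55-* renamed-dependency pattern, so two arrow majors can coexist in one crate under distinct aliases. A minimal sketch (not part of this diff) of how such aliases are used side by side, assuming they are in scope as declared above:

// Illustrative only: the two crates export distinct, non-interchangeable types.
use arrow_55_array::{Array as _, Int32Array as Int32Array55}; // arrow-array 55.x
use arrow_56_array::{Array as _, Int32Array as Int32Array56}; // arrow-array 56.x

fn main() {
    let a = Int32Array55::from(vec![1, 2, 3]);
    let b = Int32Array56::from(vec![1, 2, 3]);
    assert_eq!(a.len(), b.len()); // same logical data, built from different crate versions
}
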
2 changes: 1 addition & 1 deletion src/common/src/array/arrow/arrow_54.rs
@@ -16,7 +16,7 @@
#[path = "./arrow_impl.rs"]
mod arrow_impl;
type ArrowIntervalType = arrow_buffer::IntervalMonthDayNano;
pub use arrow_impl::{FromArrow, ToArrow, is_parquet_schema_match_source_schema};
pub use arrow_impl::{FromArrow, ToArrow};
pub use {
arrow_54_array as arrow_array, arrow_54_buffer as arrow_buffer, arrow_54_cast as arrow_cast,
arrow_54_schema as arrow_schema,
23 changes: 23 additions & 0 deletions src/common/src/array/arrow/arrow_56.rs
@@ -0,0 +1,23 @@
// Copyright 2025 RisingWave Labs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#[allow(clippy::duplicate_mod)]
#[path = "./arrow_impl.rs"]
mod arrow_impl;
type ArrowIntervalType = arrow_buffer::IntervalMonthDayNano;
pub use arrow_impl::{FromArrow, ToArrow, is_parquet_schema_match_source_schema};
pub use {
arrow_56_array as arrow_array, arrow_56_buffer as arrow_buffer, arrow_56_cast as arrow_cast,
arrow_56_schema as arrow_schema,
};
2 changes: 1 addition & 1 deletion src/common/src/array/arrow/arrow_iceberg.rs
@@ -20,7 +20,7 @@ use std::sync::Arc;
use arrow_array::ArrayRef;
use num_traits::abs;

pub use super::arrow_54::{
pub use super::arrow_56::{
FromArrow, ToArrow, arrow_array, arrow_buffer, arrow_cast, arrow_schema,
is_parquet_schema_match_source_schema,
};
1 change: 1 addition & 0 deletions src/common/src/array/arrow/mod.rs
@@ -17,6 +17,7 @@

mod arrow_54;
mod arrow_55;
mod arrow_56;
// These mods import mods above and may override some methods.
mod arrow_deltalake;
mod arrow_iceberg;
6 changes: 4 additions & 2 deletions src/common/src/row/mod.rs
@@ -312,12 +312,14 @@ macro_rules! impl_slice_row {

#[inline]
fn len(&self) -> usize {
self.as_ref().len()
AsRef::<[D]>::as_ref(self).len()
}

#[inline]
fn iter(&self) -> impl Iterator<Item = DatumRef<'_>> {
self.as_ref().iter().map(ToDatumRef::to_datum_ref)
AsRef::<[D]>::as_ref(self)
.iter()
.map(ToDatumRef::to_datum_ref)
}
};
}
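
For context on the fully qualified calls above: once a type has more than one AsRef impl (which a dependency bump can introduce for the row types this macro expands into), a bare self.as_ref() no longer infers its target. A standalone sketch with a hypothetical Owned type, not taken from the codebase:

struct Owned(Vec<u8>);

impl AsRef<[u8]> for Owned {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsRef<Vec<u8>> for Owned {
    fn as_ref(&self) -> &Vec<u8> {
        &self.0
    }
}

fn len_of(o: &Owned) -> usize {
    // `o.as_ref().len()` would be ambiguous here; name the target type explicitly,
    // exactly as the macro now does with `AsRef::<[D]>::as_ref(self)`.
    AsRef::<[u8]>::as_ref(o).len()
}

fn main() {
    assert_eq!(len_of(&Owned(vec![1, 2, 3])), 3);
}
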
4 changes: 2 additions & 2 deletions src/connector/Cargo.toml
@@ -121,15 +121,15 @@ prometheus = { version = "0.14", features = ["process"] }
prost = { workspace = true, features = ["no-recursion-limit"] }
prost-reflect = { version = "0.15", features = ["serde"] }
prost-types = "0.13"
prost-types014 = { package = "prost-types", version = "0.14" }
prost014 = { package = "prost", version = "0.14" }
pulsar = { version = "6.3", default-features = false, features = [
"tokio-runtime",
"telemetry",
"auth-oauth2",
"lz4",
"zstd",
] }
# should align with pulsar's prost version
pulsar-prost = { package = "prost", version = "0.11.9" }
rdkafka = { workspace = true, features = [
"cmake-build",
"ssl",
11 changes: 11 additions & 0 deletions src/connector/src/connector_common/iceberg/jni_catalog.rs
@@ -401,6 +401,17 @@ impl Catalog for JniCatalog {
})?
}

async fn register_table(
&self,
_table_ident: &TableIdent,
_metadata_location: String,
) -> iceberg::Result<Table> {
Err(iceberg::Error::new(
iceberg::ErrorKind::Unexpected,
"register_table is not supported by JniCatalog",
))
}

/// Check if a table exists in the catalog.
async fn table_exists(&self, table: &TableIdent) -> iceberg::Result<bool> {
execute_with_jni_env(self.jvm, |env| {
12 changes: 12 additions & 0 deletions src/connector/src/connector_common/iceberg/mock_catalog.rs
@@ -44,6 +44,7 @@ impl MockCatalog {
partition_spec: Some(partition_spec),
sort_order: None,
properties: HashMap::new(),
format_version: iceberg::spec::FormatVersion::V2,
};
Table::builder()
.identifier(TableIdent::new(
@@ -244,4 +245,15 @@ impl CatalogV2 for MockCatalog {
async fn update_table(&self, _commit: TableCommit) -> iceberg::Result<Table> {
todo!()
}

async fn register_table(
&self,
_table_ident: &TableIdent,
_metadata_location: String,
) -> iceberg::Result<Table> {
Err(iceberg::Error::new(
iceberg::ErrorKind::Unexpected,
"register_table is not supported in mock catalog",
))
}
}
60 changes: 33 additions & 27 deletions src/connector/src/connector_common/iceberg/mod.rs
@@ -24,7 +24,7 @@ use ::iceberg::io::{
S3_ACCESS_KEY_ID, S3_ASSUME_ROLE_ARN, S3_ENDPOINT, S3_REGION, S3_SECRET_ACCESS_KEY,
};
use ::iceberg::table::Table;
use ::iceberg::{Catalog, TableIdent};
use ::iceberg::{Catalog, CatalogBuilder, TableIdent};
use anyhow::{Context, anyhow};
use iceberg::io::{
ADLS_ACCOUNT_KEY, ADLS_ACCOUNT_NAME, AZBLOB_ACCOUNT_KEY, AZBLOB_ACCOUNT_NAME, AZBLOB_ENDPOINT,
@@ -667,20 +667,22 @@ impl IcebergCommon {
iceberg_configs.insert(format!("header.{}", header_name), header_value);
}

let config_builder =
iceberg_catalog_rest::RestCatalogConfig::builder()
.uri(self.catalog_uri.clone().with_context(|| {
"`catalog.uri` must be set in rest catalog".to_owned()
})?)
.props(iceberg_configs);

let config = match &self.warehouse_path {
Some(warehouse_path) => {
config_builder.warehouse(warehouse_path.clone()).build()
}
None => config_builder.build(),
};
let catalog = iceberg_catalog_rest::RestCatalog::new(config);
iceberg_configs.insert(
iceberg_catalog_rest::REST_CATALOG_PROP_URI.to_owned(),
self.catalog_uri
.clone()
.with_context(|| "`catalog.uri` must be set in rest catalog".to_owned())?,
);
if let Some(warehouse_path) = &self.warehouse_path {
iceberg_configs.insert(
iceberg_catalog_rest::REST_CATALOG_PROP_WAREHOUSE.to_owned(),
warehouse_path.clone(),
);
}
let catalog = iceberg_catalog_rest::RestCatalogBuilder::default()
.load("rest", iceberg_configs)
.await
.map_err(|e: iceberg::Error| anyhow!(e))?;
Ok(Arc::new(catalog))
}
"glue_rust" => {
@@ -717,18 +719,22 @@
path_style_access.to_string(),
);
}
let config_builder =
iceberg_catalog_glue::GlueCatalogConfig::builder()
.warehouse(self.warehouse_path.clone().ok_or_else(|| {
anyhow!("`warehouse.path` must be set in glue catalog")
})?)
.props(iceberg_configs);
let config = if let Some(uri) = self.catalog_uri.as_deref() {
config_builder.uri(uri.to_owned()).build()
} else {
config_builder.build()
};
let catalog = iceberg_catalog_glue::GlueCatalog::new(config).await?;
iceberg_configs.insert(
iceberg_catalog_glue::GLUE_CATALOG_PROP_WAREHOUSE.to_owned(),
self.warehouse_path
.clone()
.ok_or_else(|| anyhow!("`warehouse.path` must be set in glue catalog"))?,
);
if let Some(uri) = self.catalog_uri.as_deref() {
iceberg_configs.insert(
iceberg_catalog_glue::GLUE_CATALOG_PROP_URI.to_owned(),
uri.to_owned(),
);
}
let catalog = iceberg_catalog_glue::GlueCatalogBuilder::default()
.load("glue", iceberg_configs)
.await
.map_err(|e: iceberg::Error| anyhow!(e))?;
Ok(Arc::new(catalog))
}
catalog_type
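
Both the REST and Glue catalogs now go through the generic CatalogBuilder::load(name, props) entry point, with URI and warehouse passed as string properties instead of dedicated config-builder methods. A minimal sketch of the REST path (not part of this diff), assuming load yields the catalog on success as the code above implies:

use std::collections::HashMap;

use iceberg::CatalogBuilder;
use iceberg_catalog_rest::{
    REST_CATALOG_PROP_URI, REST_CATALOG_PROP_WAREHOUSE, RestCatalog, RestCatalogBuilder,
};

async fn rest_catalog(uri: &str, warehouse: Option<&str>) -> iceberg::Result<RestCatalog> {
    let mut props: HashMap<String, String> = HashMap::new();
    props.insert(REST_CATALOG_PROP_URI.to_owned(), uri.to_owned());
    if let Some(w) = warehouse {
        props.insert(REST_CATALOG_PROP_WAREHOUSE.to_owned(), w.to_owned());
    }
    // Replaces the former RestCatalogConfig::builder() + RestCatalog::new(config) flow.
    RestCatalogBuilder::default().load("rest", props).await
}
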
13 changes: 12 additions & 1 deletion src/connector/src/connector_common/iceberg/storage_catalog.rs
@@ -335,7 +335,7 @@ impl Catalog for StorageCatalog {
let table = self.load_table(table).await?;
table
.file_io()
.remove_all(table.metadata().location())
.remove_dir_all(table.metadata().location())
.await
}

@@ -380,4 +380,15 @@

self.load_table(commit.identifier()).await
}

async fn register_table(
&self,
_table_ident: &TableIdent,
_metadata_location: String,
) -> iceberg::Result<Table> {
Err(Error::new(
ErrorKind::Unexpected,
"register_table is not supported in storage catalog",
))
}
}
15 changes: 12 additions & 3 deletions src/connector/src/sink/big_query.rs
@@ -31,7 +31,7 @@ use gcp_bigquery_client::model::table_field_schema::TableFieldSchema;
use gcp_bigquery_client::model::table_schema::TableSchema;
use google_cloud_bigquery::grpc::apiv1::conn_pool::ConnectionManager;
use google_cloud_gax::conn::{ConnectionOptions, Environment};
use google_cloud_gax::grpc::Request;
use google_cloud_gax::grpc::{Request, Response, Status};
use google_cloud_googleapis::cloud::bigquery::storage::v1::append_rows_request::{
MissingValueInterpretation, ProtoData, Rows as AppendRowsRequestRows,
};
@@ -46,14 +46,17 @@ use prost_types::{
DescriptorProto, FieldDescriptorProto, FileDescriptorProto, FileDescriptorSet,
field_descriptor_proto,
};
use prost_types014::DescriptorProto as DescriptorProto014;
use prost::Message;
use prost014::Message as Message014;
use risingwave_common::array::{Op, StreamChunk};
use risingwave_common::catalog::{Field, Schema};
use risingwave_common::types::DataType;
use serde::Deserialize;
use serde_with::{DisplayFromStr, serde_as};
use simd_json::prelude::ArrayTrait;
use tokio::sync::mpsc;
use tonic::{Response, Status, async_trait};
use async_trait::async_trait;
use url::Url;
use uuid::Uuid;
use with_options::WithOptions;
@@ -683,7 +686,7 @@ impl BigQuerySinkWriter {
message_descriptor,
proto_field,
writer_pb_schema: ProtoSchema {
proto_descriptor: Some(descriptor_proto),
proto_descriptor: Some(to_gcloud_descriptor(&descriptor_proto)?),
},
},
resp_stream,
@@ -898,6 +901,12 @@ fn build_protobuf_descriptor_pool(desc: &DescriptorProto) -> Result<prost_reflec
.map_err(SinkError::BigQuery)
}

fn to_gcloud_descriptor(desc: &DescriptorProto) -> Result<DescriptorProto014> {
let bytes = Message::encode_to_vec(desc);
Message014::decode(bytes.as_slice())
.map_err(|e| SinkError::BigQuery(anyhow!(e).context("failed to convert descriptor proto")))
}

fn build_protobuf_schema<'a>(
fields: impl Iterator<Item = (&'a str, &'a DataType)>,
name: String,
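
to_gcloud_descriptor above bridges the workspace's prost 0.13 DescriptorProto (as used by prost-reflect) and the prost 0.14 one that the storage-write ProtoSchema now expects, by round-tripping through the shared wire format. A self-contained sketch of the same trick (not part of this diff), using the prost014/prost_types014 aliases from this PR's connector Cargo.toml:

use prost::Message; // prost 0.13
use prost014::Message as Message014; // prost 0.14, brought in for `decode`

fn to_prost014_descriptor(
    desc: &prost_types::DescriptorProto,
) -> Result<prost_types014::DescriptorProto, prost014::DecodeError> {
    let bytes = desc.encode_to_vec(); // serialize with prost 0.13
    prost_types014::DescriptorProto::decode(bytes.as_slice()) // parse with prost 0.14
}
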
2 changes: 1 addition & 1 deletion src/connector/src/sink/google_pubsub.rs
@@ -27,7 +27,7 @@ use risingwave_common::array::StreamChunk;
use risingwave_common::catalog::Schema;
use serde::Deserialize;
use serde_with::serde_as;
use tonic::Status;
use google_cloud_gax::grpc::Status;
use with_options::WithOptions;

use super::catalog::SinkFormatDesc;