Upgrade to pgrx v0.12.9 and support Postgres 17
Replace all uses of `u32` for OIDs with `pgrx::pg_sys::Oid`, since
`IntoDatum` no longer has an implementation for `u32` as of
pgcentralfoundation/pgrx#1354.
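
For illustration, a minimal sketch of the pattern the commit switches to, assuming an SPI row is already in hand. The helper name, the ordinal, and the error handling are hypothetical; the calls mirror those in the diff below.

```rust
use pgrx::{prelude::*, pg_sys::Oid};

// Hypothetical helper: decode an OID column as pgrx::pg_sys::Oid rather than u32,
// since u32 no longer satisfies the IntoDatum bound on value::<T>().
fn table_oid_from_row(row: &pgrx::spi::SpiHeapTupleData<'_>) -> Result<Oid, pgrx::spi::Error> {
    let table_oid: Oid = row.get_datum_by_ordinal(1)?.value::<Oid>()?.unwrap();
    // Convert to a plain integer only at the edges, e.g. for log output.
    log!("table OID: {}", table_oid.as_u32());
    Ok(table_oid)
}
```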

Update `source_column()`'s return type to omit the per-column errors,
since that pattern is no longer supported as of
pgcentralfoundation/pgrx#1708.
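
As a rough sketch of the new per-column handling (the helper name and explicit `String` type are illustrative, not part of the commit): each `name!` column is now declared as a plain `Option<String>`, and the SPI `Result` is collapsed with `.ok().flatten()` instead of being surfaced per column.

```rust
use pgrx::prelude::*;
use pgrx::spi::SpiHeapTupleData;

// Hypothetical helper mirroring the mapping in source_column() below:
// surface each column as Option<String>, not Result<Option<String>, spi::Error>.
fn column_text(row: &SpiHeapTupleData<'_>, name: &str) -> Option<String> {
    // Drop the per-column SPI error and keep only the (possibly NULL) value.
    row[name].value::<String>().ok().flatten()
}
```
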
theory committed Dec 17, 2024
1 parent 72b8688 commit 631e194
Showing 8 changed files with 47 additions and 48 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/extension_release.yml
@@ -26,7 +26,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- pg-version: [14, 15, 16]
+ pg-version: [14, 15, 16, 17]
steps:
- uses: actions/checkout@v2
- name: Install Rust stable toolchain
7 changes: 4 additions & 3 deletions extension/Cargo.toml
@@ -4,20 +4,21 @@ version = "0.0.5" # Extension version (SemVer: MAJOR.MINOR.PATCH)
edition = "2021" # Rust 2021 edition

[lib]
crate-type = ["cdylib"]
crate-type = ["cdylib", "lib"]

[features]
default = ["pg16"]
pg14 = ["pgrx/pg14", "pgrx-tests/pg14" ]
pg15 = ["pgrx/pg15", "pgrx-tests/pg15" ]
pg16 = ["pgrx/pg16", "pgrx-tests/pg16" ]
+ pg17 = ["pgrx/pg17", "pgrx-tests/pg17" ]
pg_test = []

# Custom features
experimental = []

[dependencies]
pgrx = "=0.11.4"
pgrx = "0.12.9"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
reqwest = { version = "0.11", features = ["json"] }
@@ -30,7 +31,7 @@ sha2 = "0.10"
hex = "0.4"

[dev-dependencies]
pgrx-tests = "=0.11.4"
pgrx-tests = "0.12.9"

[profile.dev]
panic = "unwind"
1 change: 1 addition & 0 deletions extension/src/bin/pgrx_embed_pg_auto_dw.rs
@@ -0,0 +1 @@
+ ::pgrx::pgrx_embed!();
4 changes: 2 additions & 2 deletions extension/src/controller/bgw_transformer_client.rs
@@ -1,5 +1,5 @@
use pgrx::bgworkers::*;
- use pgrx::prelude::*;
+ use pgrx::{prelude::*, pg_sys::Oid};

use std::time::Duration;
use std::collections::HashMap;
@@ -39,7 +39,7 @@ pub extern "C" fn background_worker_transformer_client(_arg: pg_sys::Datum) {
let mut v_source_table_prompts: Vec<source_objects::SourceTablePrompt> = Vec::new();
for source_object_json in source_objects_json {

- let table_oid = source_object_json.get_datum_by_ordinal(1)?.value::<u32>()?.unwrap();
+ let table_oid = source_object_json.get_datum_by_ordinal(1)?.value::<Oid>()?.unwrap();
let table_column_links = source_object_json.get_datum_by_ordinal(2)?.value::<pgrx::Json>()?.unwrap();
let table_details = source_object_json.get_datum_by_ordinal(3)?.value::<pgrx::Json>()?.unwrap();

24 changes: 12 additions & 12 deletions extension/src/controller/dv_builder.rs
@@ -1,4 +1,4 @@
- use pgrx::prelude::*;
+ use pgrx::{prelude::*, pg_sys::Oid};
use uuid::Uuid;
use std::collections::HashMap;
use chrono::Utc;
@@ -19,7 +19,7 @@ use super::dv_loader::*;

pub fn build_dv(build_id: Uuid, dv_objects_query: &str, load_data: bool) {

- let mut dv_objects_hm: HashMap<u32, Vec<TransformerObject>> = HashMap::new();
+ let mut dv_objects_hm: HashMap<Oid, Vec<TransformerObject>> = HashMap::new();

Spi::connect(|client|
{
@@ -38,7 +38,7 @@ pub fn build_dv(build_id: Uuid, dv_objects_query: &str, load_data: bool) {
let column_name = dv_object.get_datum_by_ordinal(5).unwrap().value::<String>().unwrap().unwrap();
let column_type_name = dv_object.get_datum_by_ordinal(6).unwrap().value::<String>().unwrap().unwrap();
let system_id = dv_object.get_datum_by_ordinal(7).unwrap().value::<i64>().unwrap().unwrap();
- let table_oid: u32 = dv_object.get_datum_by_ordinal(8).unwrap().value::<u32>().unwrap().unwrap();
+ let table_oid: Oid = dv_object.get_datum_by_ordinal(8).unwrap().value::<Oid>().unwrap().unwrap();
let column_ordinal_position = dv_object.get_datum_by_ordinal(9).unwrap().value::<i16>().unwrap().unwrap();

let column_category = ColumnCategory::from_str(&column_category);
@@ -332,7 +332,7 @@ fn insert_dw_source_columns(dv_schema: &DVSchema) {
let table_oid = column.0;
let column_ordinal_position = column.1;

log!("DV'd Table: {table_oid}, Col: {column_ordinal_position}");
log!("DV'd Table: {}, Col: {column_ordinal_position}", table_oid.as_u32());

Spi::connect( |mut client| {
_ = client.update(insert_dw_source_column, None,
@@ -413,10 +413,10 @@ fn business_key_part_link_add_hub_target_column(schema_name: &String, business_k
// Only 0 or 1 record should be returned.
if let Some(column_data_record) = column_data.into_iter().next() {
let system_id = column_data_record.get_datum_by_ordinal(1).unwrap().value::<i64>().unwrap().unwrap();
- let _schema_oid = column_data_record.get_datum_by_ordinal(2).unwrap().value::<u32>().unwrap().unwrap();
+ let _schema_oid = column_data_record.get_datum_by_ordinal(2).unwrap().value::<Oid>().unwrap().unwrap();
let schema_name = column_data_record.get_datum_by_ordinal(3).unwrap().value::<String>().unwrap().unwrap();
let table_name = column_data_record.get_datum_by_ordinal(4).unwrap().value::<String>().unwrap().unwrap();
- let table_oid = column_data_record.get_datum_by_ordinal(5).unwrap().value::<u32>().unwrap().unwrap();
+ let table_oid = column_data_record.get_datum_by_ordinal(5).unwrap().value::<Oid>().unwrap().unwrap();
let column_name = column_data_record.get_datum_by_ordinal(6).unwrap().value::<String>().unwrap().unwrap();
let column_ordinal_position = column_data_record.get_datum_by_ordinal(7).unwrap().value::<i16>().unwrap().unwrap();
let column_type_name = column_data_record.get_datum_by_ordinal(8).unwrap().value::<String>().unwrap().unwrap();
@@ -467,10 +467,10 @@ fn descriptor_add_target_columns(schema_name: &String, descriptor: &mut Descript
// Only 0 or 1 record should be returned.
if let Some(column_data_record) = column_data.into_iter().next() {
let system_id = column_data_record.get_datum_by_ordinal(1).unwrap().value::<i64>().unwrap().unwrap();
- let _schema_oid = column_data_record.get_datum_by_ordinal(2).unwrap().value::<u32>().unwrap().unwrap();
+ let _schema_oid = column_data_record.get_datum_by_ordinal(2).unwrap().value::<Oid>().unwrap().unwrap();
let schema_name = column_data_record.get_datum_by_ordinal(3).unwrap().value::<String>().unwrap().unwrap();
let table_name = column_data_record.get_datum_by_ordinal(4).unwrap().value::<String>().unwrap().unwrap();
- let table_oid = column_data_record.get_datum_by_ordinal(5).unwrap().value::<u32>().unwrap().unwrap();
+ let table_oid = column_data_record.get_datum_by_ordinal(5).unwrap().value::<Oid>().unwrap().unwrap();
let column_name = column_data_record.get_datum_by_ordinal(6).unwrap().value::<String>().unwrap().unwrap();
let column_ordinal_position = column_data_record.get_datum_by_ordinal(7).unwrap().value::<i16>().unwrap().unwrap();
let column_type_name = column_data_record.get_datum_by_ordinal(8).unwrap().value::<String>().unwrap().unwrap();
@@ -708,16 +708,16 @@ struct TransformerObject {
column_name: String,
column_type_name: String,
system_id: i64,
- table_oid: u32,
+ table_oid: Oid,
column_ordinal_position: i16,
column_category: ColumnCategory,
}

// Separates TransformerObject with multiple business key parts
- fn separate_by_business_parts(dv_objects_hm_single_bkp: HashMap<u32, Vec<TransformerObject>>) -> (HashMap<u32, Vec<TransformerObject>>, HashMap<u32, Vec<TransformerObject>>) {
+ fn separate_by_business_parts(dv_objects_hm_single_bkp: HashMap<Oid, Vec<TransformerObject>>) -> (HashMap<Oid, Vec<TransformerObject>>, HashMap<Oid, Vec<TransformerObject>>) {

- let mut single_business_key_part: HashMap<u32, Vec<TransformerObject>> = HashMap::new();
- let mut multiple_business_key_parts: HashMap<u32, Vec<TransformerObject>> = HashMap::new();
+ let mut single_business_key_part: HashMap<Oid, Vec<TransformerObject>> = HashMap::new();
+ let mut multiple_business_key_parts: HashMap<Oid, Vec<TransformerObject>> = HashMap::new();

for (table_oid, transformer_objects) in dv_objects_hm_single_bkp {
let business_key_count = transformer_objects.iter()
34 changes: 17 additions & 17 deletions extension/src/lib.rs
@@ -116,14 +116,14 @@ fn source_column() -> Result<
TableIterator<
'static,
(
- name!(schema, Result<Option<String>, pgrx::spi::Error>),
- name!(table, Result<Option<String>, pgrx::spi::Error>),
- name!(column, Result<Option<String>, pgrx::spi::Error>),
- name!(status, Result<Option<String>, pgrx::spi::Error>),
- name!(category, Result<Option<String>, pgrx::spi::Error>),
- name!(is_sensitive, Result<Option<String>, pgrx::spi::Error>),
- name!(confidence_level, Result<Option<String>, pgrx::spi::Error>),
- name!(status_response, Result<Option<String>, pgrx::spi::Error>),
+ name!(schema, Option<String>),
+ name!(table, Option<String>),
+ name!(column, Option<String>),
+ name!(status, Option<String>),
+ name!(category, Option<String>),
+ name!(is_sensitive, Option<String>),
+ name!(confidence_level, Option<String>),
+ name!(status_response, Option<String>),
)
>,
spi::Error,
@@ -142,14 +142,14 @@ fn source_column() -> Result<
.select(query, None, None)?
.map(|row| {
(
row["schema"].value(),
row["table"].value(),
row["column"].value(),
row["status"].value(),
row["category"].value(),
row["is_sensitive"].value(),
row["confidence_level"].value(),
row["status_response"].value(),
row["schema"].value().ok().flatten(),
row["table"].value().ok().flatten(),
row["column"].value().ok().flatten(),
row["status"].value().ok().flatten(),
row["category"].value().ok().flatten(),
row["is_sensitive"].value().ok().flatten(),
row["confidence_level"].value().ok().flatten(),
row["status_response"].value().ok().flatten(),
)
})
.collect::<Vec<_>>())
@@ -170,7 +170,7 @@ mod tests {

// TODO: Unit Testing
#[pg_test]
- fn go_default() {
+ fn test_go_default() {
}

}
19 changes: 8 additions & 11 deletions extension/src/model/dv_schema.rs
@@ -1,6 +1,7 @@
- use uuid::Uuid;
use chrono::NaiveDateTime;
+ use pgrx::pg_sys::Oid;
use serde::{Deserialize, Serialize};
+ use uuid::Uuid;

#[derive(Serialize, Deserialize, Debug)]
pub struct DVSchema {
@@ -19,9 +20,8 @@ pub struct DVSchema {
}

impl DVSchema {

- pub fn get_columns(&self) -> Vec<(u32, i16)> {
- let mut columns: Vec<(u32, i16)> = Vec::new();
+ pub fn get_columns(&self) -> Vec<(Oid, i16)> {
+ let mut columns: Vec<(Oid, i16)> = Vec::new();
for link_key in &self.link_keys {
for business_key in &link_key.business_keys {
columns.append(&mut business_key.get_columns());
@@ -41,7 +41,6 @@ impl DVSchema {
}
}


#[derive(Serialize, Deserialize, Debug)]
pub struct LinkKey {
#[serde(rename = "ID")]
@@ -79,10 +78,8 @@ pub struct BusinessKeyPartLink {
}

impl BusinessKey {

- pub fn get_columns(&self) -> Vec<(u32, i16)> {
-
- let mut columns: Vec<(u32, i16)> = Vec::new();
+ pub fn get_columns(&self) -> Vec<(Oid, i16)> {
+ let mut columns: Vec<(Oid, i16)> = Vec::new();
// BK Part Search
for bkp_link in &self.business_key_part_links {
for source_column in &bkp_link.source_columns {
@@ -132,7 +129,7 @@ pub struct ColumnData {
#[serde(rename = "Schema Name")]
pub schema_name: String,
#[serde(rename = "Table OID")]
- pub table_oid: u32,
+ pub table_oid: Oid,
#[serde(rename = "Table Name")]
pub table_name: String,
#[serde(rename = "Column Name")]
Expand All @@ -144,7 +141,7 @@ pub struct ColumnData {
}

impl ColumnData {
- pub fn get_column(&self) -> (u32, i16) {
+ pub fn get_column(&self) -> (Oid, i16) {
(self.table_oid, self.column_ordinal_position)
}
}
4 changes: 2 additions & 2 deletions extension/src/model/source_objects.rs
@@ -1,10 +1,10 @@
- use pgrx::Json as JsonValue;
+ use pgrx::{Json as JsonValue, pg_sys::Oid};
use serde::{Deserialize, Deserializer, Serialize};

#[derive(Debug)]
pub struct SourceTablePrompt {
#[allow(dead_code)]
- pub key: u32,
+ pub key: Oid,
pub table_column_links: JsonValue, // For linking columns to foreign keys
pub table_details: JsonValue,
}
