diff --git a/store/postgres/src/relational.rs b/store/postgres/src/relational.rs
index de7e6895083..efb5776adf1 100644
--- a/store/postgres/src/relational.rs
+++ b/store/postgres/src/relational.rs
@@ -1715,7 +1715,7 @@ impl Table {
pub fn new_like(&self, namespace: &Namespace, name: &SqlName) -> Arc<Table>
{
let other = Table {
object: self.object.clone(),
- nsp: self.nsp.clone(),
+ nsp: namespace.clone(),
name: name.clone(),
qualified_name: SqlName::qualified_name(namespace, name),
columns: self.columns.clone(),
diff --git a/store/postgres/src/relational/ddl.rs b/store/postgres/src/relational/ddl.rs
index 40a02d6051e..55e116272d1 100644
--- a/store/postgres/src/relational/ddl.rs
+++ b/store/postgres/src/relational/ddl.rs
@@ -403,13 +403,7 @@ impl Table {
if index_def.is_some() && ENV_VARS.postpone_attribute_index_creation {
let arr = index_def
.unwrap()
- .indexes_for_table(
- &catalog.site.namespace,
- &self.name.to_string(),
- &self,
- false,
- false,
- )
+ .indexes_for_table(&self.nsp, &self.name.to_string(), &self, false, false)
.map_err(|_| fmt::Error)?;
for (_, sql) in arr {
writeln!(out, "{};", sql).expect("properly formated index statements")
diff --git a/store/postgres/src/relational/index.rs b/store/postgres/src/relational/index.rs
index 6013a5d9e68..4f72e773ee6 100644
--- a/store/postgres/src/relational/index.rs
+++ b/store/postgres/src/relational/index.rs
@@ -440,7 +440,7 @@ impl CreateIndex {
}
}
- fn with_nsp(&self, nsp2: String) -> Result<Self> {
+ pub fn with_nsp(&self, nsp2: String) -> Result<Self> {
let s = self.clone();
match s {
CreateIndex::Unknown { defn: _ } => Err(anyhow!("Failed to parse the index")),
@@ -734,6 +734,16 @@ pub struct IndexList {
pub(crate) indexes: HashMap<String, Vec<CreateIndex>>,
}
+pub fn load_indexes_from_table(
+ conn: &mut PgConnection,
+ table: &Arc,
+ schema_name: &str,
+) -> Result, StoreError> {
+ let table_name = table.name.as_str();
+ let indexes = catalog::indexes_for_table(conn, schema_name, table_name)?;
+ Ok(indexes.into_iter().map(CreateIndex::parse).collect())
+}
+
impl IndexList {
pub fn load(
conn: &mut PgConnection,
@@ -746,10 +756,8 @@ impl IndexList {
let schema_name = site.namespace.clone();
let layout = store.layout(conn, site)?;
for (_, table) in &layout.tables {
- let table_name = table.name.as_str();
- let indexes = catalog::indexes_for_table(conn, schema_name.as_str(), table_name)?;
- let collect: Vec<CreateIndex> = indexes.into_iter().map(CreateIndex::parse).collect();
- list.indexes.insert(table_name.to_string(), collect);
+ let indexes = load_indexes_from_table(conn, table, schema_name.as_str())?;
+ list.indexes.insert(table.name.to_string(), indexes);
}
Ok(list)
}
diff --git a/store/postgres/src/relational/prune.rs b/store/postgres/src/relational/prune.rs
index 62632549397..5c3035ce172 100644
--- a/store/postgres/src/relational/prune.rs
+++ b/store/postgres/src/relational/prune.rs
@@ -1,4 +1,4 @@
-use std::{fmt::Write, sync::Arc};
+use std::{collections::HashMap, fmt::Write, sync::Arc};
use diesel::{
connection::SimpleConnection,
@@ -23,7 +23,10 @@ use crate::{
vid_batcher::{VidBatcher, VidRange},
};
-use super::{Catalog, Layout, Namespace};
+use super::{
+ index::{load_indexes_from_table, CreateIndex, IndexList},
+ Catalog, Layout, Namespace,
+};
/// Utility to copy relevant data out of a source table and into a new
/// destination table and replace the source table with the destination
@@ -56,9 +59,18 @@ impl TablePair {
if catalog::table_exists(conn, dst_nsp.as_str(), &dst.name)? {
writeln!(query, "truncate table {};", dst.qualified_name)?;
} else {
+ let mut list = IndexList {
+ indexes: HashMap::new(),
+ };
+ let indexes = load_indexes_from_table(conn, &src, src_nsp.as_str())?
+ .into_iter()
+ .map(|index| index.with_nsp(dst_nsp.to_string()))
+ .collect::<Result<Vec<_>, _>>()?;
+ list.indexes.insert(src.name.to_string(), indexes);
+
// In case of pruning we don't do delayed creation of indexes,
// as the asumption is that there is not that much data inserted.
- dst.as_ddl(schema, catalog, None, &mut query)?;
+ dst.as_ddl(schema, catalog, Some(&list), &mut query)?;
}
conn.batch_execute(&query)?;