Fix backup statistics for leaf partitions (#42)
gpbackup does not back up statistics for leaf partitions when the
leaf-partition-data option is used.

This patch solves the problem by backing up statistics for all specified
tables, including leaf partitions, not just root partitions.

The test for this patch is based on the test for 85384fa.
As stated in that patch, it also removes the unnecessary
condition used when comparing statistics counts.

Cherry-picked-from: 0ce7382
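
The heart of the change is easiest to see in isolation: with leaf-partition-data in effect, the root/leaf split keeps only root partitions in the metadata set, so a statistics pass driven by that set silently skips every leaf partition. Below is a minimal, self-contained Go sketch of that behavior; the Table struct, its IsLeaf field, and the splitTables helper are simplified stand-ins for gpbackup's real types and for SplitTablesByPartitionType, not the actual implementation.

    // Illustrative sketch only, not gpbackup source: Table, IsLeaf, and
    // splitTables are simplified stand-ins for the real types and for
    // SplitTablesByPartitionType.
    package main

    import "fmt"

    type Table struct {
        Name   string
        IsLeaf bool
    }

    // With leaf-partition-data, roots land in the metadata set and leaves in
    // the data set; a statistics pass driven by the metadata set alone
    // therefore misses every leaf partition.
    func splitTables(all []Table) (metadata, data []Table) {
        for _, t := range all {
            if t.IsLeaf {
                data = append(data, t)
            } else {
                metadata = append(metadata, t)
            }
        }
        return metadata, data
    }

    func main() {
        all := []Table{
            {Name: "public.pt", IsLeaf: false},
            {Name: "public.pt_1_prt_src_mdm", IsLeaf: true},
        }
        metadata, _ := splitTables(all)
        fmt.Println("tables seen by stats before the fix:", len(metadata)) // 1
        fmt.Println("tables seen by stats after the fix:", len(all))       // 2
    }

After the change, RetrieveAndProcessTables hands the unsplit list back to DoBackup, which passes it to backupStatistics, so statistics for leaf partitions such as pt_1_prt_src_mdm are dumped as well.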
RekGRpth committed Dec 28, 2023
1 parent df1c983 commit db9689d
Showing 4 changed files with 10 additions and 20 deletions.
4 changes: 2 additions & 2 deletions backup/backup.go
@@ -128,7 +128,7 @@ func DoBackup() {
}

gplog.Info("Gathering table state information")
metadataTables, dataTables := RetrieveAndProcessTables()
metadataTables, dataTables, allTables := RetrieveAndProcessTables()
dataTables, numExtOrForeignTables := GetBackupDataSet(dataTables)
if len(dataTables) == 0 && !backupReport.MetadataOnly {
gplog.Warn("No tables in backup set contain data. Performing metadata-only backup instead.")
@@ -195,7 +195,7 @@ func DoBackup() {

printDataBackupWarnings(numExtOrForeignTables)
if MustGetFlagBool(options.WITH_STATS) {
backupStatistics(metadataTables)
backupStatistics(allTables)
}

globalTOC.WriteToFileAndMakeReadOnly(globalFPInfo.GetTOCFilePath())
8 changes: 4 additions & 4 deletions backup/wrappers.go
@@ -205,7 +205,7 @@ func createBackupDirectoriesOnAllHosts() {
* Metadata retrieval wrapper functions
*/

func RetrieveAndProcessTables() ([]Table, []Table) {
func RetrieveAndProcessTables() ([]Table, []Table, []Table) {
includedRelations := GetIncludedUserTableRelations(connectionPool, IncludedRelationFqns)
tableRelations := ConvertRelationsOptionsToBackup(includedRelations)

@@ -215,12 +215,12 @@ func RetrieveAndProcessTables() ([]Table, []Table) {
tableRelations = append(tableRelations, GetForeignTableRelations(connectionPool)...)
}

tables := ConstructDefinitionsForTables(connectionPool, tableRelations)
allTables := ConstructDefinitionsForTables(connectionPool, tableRelations)

metadataTables, dataTables := SplitTablesByPartitionType(tables, IncludedRelationFqns)
metadataTables, dataTables := SplitTablesByPartitionType(allTables, IncludedRelationFqns)
objectCounts["Tables"] = len(metadataTables)

return metadataTables, dataTables
return metadataTables, dataTables, allTables
}

func retrieveFunctions(sortables *[]Sortable, metadataMap MetadataMap) ([]Function, map[uint32]FunctionInfo) {
16 changes: 3 additions & 13 deletions end_to_end/end_to_end_suite_test.go
@@ -1527,13 +1527,6 @@ var _ = Describe("backup and restore end to end tests", func() {
skipIfOldBackupVersionBefore("1.18.0")

testhelper.AssertQueryRuns(backupConn, `
CREATE TABLE et (
id character varying(13),
flg smallint,
dttm timestamp without time zone,
src character varying(80)
) WITH (appendonly='true', orientation='row', compresstype=zstd, compresslevel='3') DISTRIBUTED BY (id);
CREATE TABLE pt (
id character varying(13),
flg smallint,
@@ -1544,19 +1537,16 @@ var _ = Describe("backup and restore end to end tests", func() {
);
INSERT INTO pt(id, flg, dttm, src) VALUES (1, 1, now(), 'val');
INSERT INTO et(id, flg, dttm, src) VALUES (2, 2, now(), 'val');
ANALYZE pt;
ANALYZE et;
ANALYZE ROOTPARTITION pt;
ALTER TABLE pt EXCHANGE PARTITION src_mdm WITH TABLE et;
`)

defer testhelper.AssertQueryRuns(backupConn,
`DROP TABLE et CASCADE; DROP TABLE pt CASCADE;`)
`DROP TABLE pt CASCADE;`)
timestamp := gpbackup(gpbackupPath, backupHelperPath,
"--with-stats",
"--leaf-partition-data",
"--backup-dir", backupDir, "--single-backup-dir")
files, err := path.Glob(path.Join(backupDir, "backups/*",
timestamp, "*statistics.sql"))
@@ -1574,7 +1564,7 @@ var _ = Describe("backup and restore end to end tests", func() {
assertPGClassStatsRestored(backupConn, restoreConn, publicSchemaTupleCounts)
assertPGClassStatsRestored(backupConn, restoreConn, schema2TupleCounts)

statsQuery := fmt.Sprintf(`SELECT count(*) AS string FROM pg_statistic st left join pg_class cl on st.starelid = cl.oid left join pg_namespace nm on cl.relnamespace = nm.oid where cl.relname != 'pt_1_prt_src_mdm' AND %s;`, backup.SchemaFilterClause("nm"))
statsQuery := fmt.Sprintf(`SELECT count(*) AS string FROM pg_statistic st left join pg_class cl on st.starelid = cl.oid left join pg_namespace nm on cl.relnamespace = nm.oid where %s;`, backup.SchemaFilterClause("nm"))
backupStatisticCount := dbconn.MustSelectString(backupConn, statsQuery)
restoredStatisticsCount := dbconn.MustSelectString(restoreConn, statsQuery)

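For a quick out-of-band check of what this test asserts, the same catalog count can be run against both clusters. Below is a standalone sketch using Go's database/sql that mirrors the test's statsQuery; the DSNs, the schema list, and the pgx driver choice are assumptions for illustration only, and the suite itself uses its own dbconn helpers instead.

    // Standalone sketch (not part of the suite): count pg_statistic rows per
    // schema on the backup and restore clusters, mirroring the test's
    // statsQuery. DSNs and the pgx driver are assumed for illustration.
    package main

    import (
        "database/sql"
        "fmt"
        "log"

        _ "github.com/jackc/pgx/v5/stdlib" // assumed driver; any Postgres driver works
    )

    const statsCountQuery = `
    SELECT count(*)
    FROM pg_statistic st
    LEFT JOIN pg_class cl ON st.starelid = cl.oid
    LEFT JOIN pg_namespace nm ON cl.relnamespace = nm.oid
    WHERE nm.nspname IN ('public', 'schema2');`

    func countStats(dsn string) (int, error) {
        db, err := sql.Open("pgx", dsn)
        if err != nil {
            return 0, err
        }
        defer db.Close()
        var n int
        err = db.QueryRow(statsCountQuery).Scan(&n)
        return n, err
    }

    func main() {
        backupCount, err := countStats("postgres://gpadmin@backup-host:5432/testdb")
        if err != nil {
            log.Fatal(err)
        }
        restoreCount, err := countStats("postgres://gpadmin@restore-host:5432/restoredb")
        if err != nil {
            log.Fatal(err)
        }
        // With the fix, leaf-partition rows (e.g. for pt_1_prt_src_mdm) are
        // backed up too, so the counts should match without excluding any relation.
        fmt.Println(backupCount == restoreCount)
    }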
2 changes: 1 addition & 1 deletion integration/wrappers_test.go
@@ -35,7 +35,7 @@ var _ = Describe("Wrappers Integration", func() {
connectionPool.MustBegin(0)
defer connectionPool.MustCommit(0)

_, dataTables := backup.RetrieveAndProcessTables()
_, dataTables, _ := backup.RetrieveAndProcessTables()
Expect(len(dataTables)).To(Equal(2))
Expect(dataTables[0].Name).To(Equal("foo"))
Expect(dataTables[1].Name).To(Equal(`"BAR"`))
