Revert "rewrite"
This reverts commit 557c13e.
RekGRpth committed May 20, 2024
1 parent 557c13e · commit 4944142
Showing 2 changed files with 18 additions and 45 deletions.
31 changes: 8 additions & 23 deletions backup/queries_statistics.go
@@ -11,7 +11,6 @@ import (
     "github.com/greenplum-db/gp-common-go-libs/dbconn"
     "github.com/greenplum-db/gp-common-go-libs/gplog"
     "github.com/greenplum-db/gpbackup/utils"
-    "github.com/jmoiron/sqlx"
     "github.com/lib/pq"
 )

@@ -54,7 +53,7 @@ type AttributeStatistic struct {
     Values5 pq.StringArray `db:"stavalues5"`
 }
 
-func GetAttributeStatisticsRows(connectionPool *dbconn.DBConn, tables []Table) (*sqlx.Rows, error) {
+func GetAttributeStatistics(connectionPool *dbconn.DBConn, tables []Table) map[uint32][]AttributeStatistic {
     inheritClause := ""
     statSlotClause := ""
     if connectionPool.Version.AtLeast("6") {
@@ -120,20 +119,13 @@ func GetAttributeStatisticsRows(connectionPool *dbconn.DBConn, tables []Table) (
         inheritClause, statSlotClause, statCollationClause,
         SchemaFilterClause("n"), utils.SliceToQuotedString(tablenames))
 
-    return connectionPool.Query(query)
-}
-
-func GetAttributeStatistics(connectionPool *dbconn.DBConn, tables []Table) map[uint32][]AttributeStatistic {
-    results, err := GetAttributeStatisticsRows(connectionPool, tables)
+    results := make([]AttributeStatistic, 0)
+    err := connectionPool.Select(&results, query)
     gplog.FatalOnError(err)
     stats := make(map[uint32][]AttributeStatistic)
-    for results.Next() {
-        var stat AttributeStatistic
-        err = results.StructScan(&stat)
-        gplog.FatalOnError(err)
+    for _, stat := range results {
         stats[stat.Oid] = append(stats[stat.Oid], stat)
     }
-    gplog.FatalOnError(results.Err())
     return stats
 }
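
For contrast, here is a minimal standalone sketch of the streaming pattern this revert removes: the rewrite had the *Rows functions return a *sqlx.Rows so callers scan one row at a time with StructScan instead of loading everything up front. The sketch uses sqlx directly rather than the gp-common-go-libs dbconn wrapper; the connection string, row type, and query are illustrative assumptions, not gpbackup code.

// Sketch of the removed streaming pattern, under the assumptions noted above.
package main

import (
	"fmt"
	"log"

	"github.com/jmoiron/sqlx"
	_ "github.com/lib/pq" // Postgres driver
)

// relationRow is an illustrative row type; the db tags match the column
// names returned by the query below.
type relationRow struct {
	Oid  uint32 `db:"oid"`
	Name string `db:"relname"`
}

func main() {
	db, err := sqlx.Connect("postgres", "dbname=postgres sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Queryx returns *sqlx.Rows; rows are fetched and scanned one by one,
	// so the full result set is never held in a Go slice at once.
	rows, err := db.Queryx(`SELECT oid, relname FROM pg_class ORDER BY relname`)
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	count := 0
	for rows.Next() {
		var row relationRow
		if err := rows.StructScan(&row); err != nil {
			log.Fatal(err)
		}
		count++
	}
	if err := rows.Err(); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("streamed %d pg_class rows\n", count)
}

Scanning rows as they arrive keeps peak memory lower than materializing the whole result set into a slice, which is the main practical difference from the Select-based code the revert restores.
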

@@ -145,7 +137,7 @@ type TupleStatistic struct {
     RelTuples float64
 }
 
-func GetTupleStatisticsRows(connectionPool *dbconn.DBConn, tables []Table) (*sqlx.Rows, error) {
+func GetTupleStatistics(connectionPool *dbconn.DBConn, tables []Table) map[uint32]TupleStatistic {
     tablenames := make([]string, 0)
     for _, table := range tables {
         tablenames = append(tablenames, table.FQN())
@@ -163,19 +155,12 @@ func GetTupleStatisticsRows(connectionPool *dbconn.DBConn, tables []Table) (*sql
         ORDER BY n.nspname, c.relname`
         SchemaFilterClause("n"), utils.SliceToQuotedString(tablenames))
 
-    return connectionPool.Query(query)
-}
-
-func GetTupleStatistics(connectionPool *dbconn.DBConn, tables []Table) map[uint32]TupleStatistic {
-    results, err := GetTupleStatisticsRows(connectionPool, tables)
+    results := make([]TupleStatistic, 0)
+    err := connectionPool.Select(&results, query)
     gplog.FatalOnError(err)
     stats := make(map[uint32]TupleStatistic)
-    for results.Next() {
-        var stat TupleStatistic
-        err = results.StructScan(&stat)
-        gplog.FatalOnError(err)
+    for _, stat := range results {
         stats[stat.Oid] = stat
     }
-    gplog.FatalOnError(results.Err())
     return stats
 }
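
The restored functions instead materialize the full result set with connectionPool.Select and then group the rows by table OID. Here is a minimal self-contained sketch of that pattern, using sqlx directly in place of the dbconn wrapper; the connection string, db struct tags, and pg_class query are illustrative assumptions rather than gpbackup's exact code.

// Sketch of the restored Select-into-slice pattern, under the assumptions noted above.
package main

import (
	"fmt"
	"log"

	"github.com/jmoiron/sqlx"
	_ "github.com/lib/pq" // Postgres driver
)

// TupleStatistic mirrors the shape of the struct in backup/queries_statistics.go;
// the db tags here are assumptions made for this standalone example.
type TupleStatistic struct {
	Oid       uint32  `db:"oid"`
	Schema    string  `db:"schemaname"`
	Table     string  `db:"tablename"`
	RelPages  int32   `db:"relpages"`
	RelTuples float64 `db:"reltuples"`
}

func main() {
	db, err := sqlx.Connect("postgres", "dbname=postgres sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	query := `
	SELECT c.oid,
		quote_ident(n.nspname) AS schemaname,
		quote_ident(c.relname) AS tablename,
		c.relpages,
		c.reltuples
	FROM pg_class c
	JOIN pg_namespace n ON c.relnamespace = n.oid
	WHERE n.nspname = 'public'
	ORDER BY n.nspname, c.relname`

	// Materialize the whole result set in one call, as the reverted-to
	// GetTupleStatistics does with connectionPool.Select(&results, query),
	// rather than streaming rows with Next()/StructScan.
	results := make([]TupleStatistic, 0)
	if err := db.Select(&results, query); err != nil {
		log.Fatal(err)
	}

	// Group by table OID, matching the map the original function returns.
	stats := make(map[uint32]TupleStatistic)
	for _, stat := range results {
		stats[stat.Oid] = stat
	}
	fmt.Printf("collected tuple statistics for %d tables\n", len(stats))
}

The call shape matches the diff above: pass a pointer to a slice, get the whole result set in one call, then build the map keyed by OID.
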
32 changes: 10 additions & 22 deletions backup/wrappers.go
@@ -773,30 +773,18 @@ func backupExtendedStatistic(metadataFile *utils.FileWithByteCount) {
 
 func backupTableStatistics(statisticsFile *utils.FileWithByteCount, tables []Table) {
     backupSessionGUC(statisticsFile)
-    tupleStats, err := GetTupleStatisticsRows(connectionPool, tables)
-    gplog.FatalOnError(err)
-    for tupleStats.Next() {
-        var stat TupleStatistic
-        err = tupleStats.StructScan(&stat)
-        gplog.FatalOnError(err)
-        table := Table{Relation: Relation{Schema: stat.Schema, Name: stat.Table}}
-        tupleQuery := GenerateTupleStatisticsQuery(table, stat)
-        printStatisticsStatementForTable(statisticsFile, globalTOC, table, tupleQuery)
-    }
-    gplog.FatalOnError(tupleStats.Err())
-    attStats, err := GetAttributeStatisticsRows(connectionPool, tables)
-    gplog.FatalOnError(err)
-    for attStats.Next() {
-        var stat AttributeStatistic
-        err = attStats.StructScan(&stat)
-        gplog.FatalOnError(err)
-        table := Table{Relation: Relation{Schema: stat.Schema, Name: stat.Table}}
-        attributeQueries := GenerateAttributeStatisticsQueries(table, stat)
-        for _, attrQuery := range attributeQueries {
-            printStatisticsStatementForTable(statisticsFile, globalTOC, table, attrQuery)
 
+    length := len(tables)
+    slice := 10000
+    for start := 0; start < length; start += slice {
+        end := start + slice
+        if end > length {
+            end = length
+        }
+        attStats := GetAttributeStatistics(connectionPool, tables[start:end])
+        tupleStats := GetTupleStatistics(connectionPool, tables[start:end])
+        PrintStatisticsStatements(statisticsFile, globalTOC, tables[start:end], attStats, tupleStats)
     }
-    gplog.FatalOnError(attStats.Err())
 }
 
 func backupIncrementalMetadata() {
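
Beyond swapping Select back in, the restored backupTableStatistics batches the table list so each statistics query covers at most 10,000 tables. Below is a small standalone sketch of that chunking logic; forEachBatch and processBatch are hypothetical names introduced for this example, standing in for the per-batch GetAttributeStatistics, GetTupleStatistics, and PrintStatisticsStatements calls in the real function.

// Standalone sketch of the batching loop, with hypothetical helper names.
package main

import "fmt"

// forEachBatch walks items in slices of up to batchSize entries and hands
// each slice to processBatch, mirroring the start/end arithmetic in the diff.
func forEachBatch(items []string, batchSize int, processBatch func(batch []string)) {
	length := len(items)
	for start := 0; start < length; start += batchSize {
		end := start + batchSize
		if end > length {
			end = length // final batch may be shorter than batchSize
		}
		processBatch(items[start:end])
	}
}

func main() {
	// 25,003 fake table names to show the 10,000 / 10,000 / 5,003 split.
	tables := make([]string, 25003)
	for i := range tables {
		tables[i] = fmt.Sprintf("public.table_%d", i)
	}

	forEachBatch(tables, 10000, func(batch []string) {
		fmt.Printf("processing %d tables, first is %s\n", len(batch), batch[0])
	})
}

Clamping end to len(items) keeps the final, shorter batch from indexing past the end of the slice.
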
