diff --git a/pghrep/plugins/A008.go b/pghrep/plugins/A008.go
new file mode 100644
index 0000000000000000000000000000000000000000..31705d53fff5600310333cb4f2e4c75c8e47b9ef
--- /dev/null
+++ b/pghrep/plugins/A008.go
@@ -0,0 +1,129 @@
+package main
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "strconv"
+ "strings"
+
+ "../src/orderedmap"
+ "../src/pyraconv"
+)
+
+var CRITICAL_USAGE int = 90
+
+var Data map[string]interface{}
+
+type prepare string
+
+func saveJsonConclusionsRecommendations(data map[string]interface{}, conclusions []string, recommendations []string) {
+ filePath := pyraconv.ToString(data["source_path_full"])
+ jsonData, err := ioutil.ReadFile(filePath) // just pass the file name
+ if err != nil {
+ return
+ }
+ orderedData := orderedmap.New()
+ if err := json.Unmarshal([]byte(jsonData), &orderedData); err != nil {
+ return
+ } else {
+ orderedData.Set("conclusions", conclusions)
+ orderedData.Set("recommendations", recommendations)
+ resultJson, jerr := orderedData.MarshalJSON(); if jerr != nil { return } // BUG FIX: error was discarded with `_`; bail out like the F005 plugin does instead of writing garbage
+ var out bytes.Buffer
+ json.Indent(&out, resultJson, "", " ")
+ jfile, err := os.Create(filePath)
+ if err != nil {
+ return
+ }
+ defer jfile.Close()
+ out.WriteTo(jfile)
+ }
+}
+
+func getHostDbpointData(data map[string]interface{}, host string, point string) map[string]interface{} { // returns results[host].data.db_data[point], i.e. the raw df entry for one mount point
+ results := pyraconv.ToInterfaceMap(data["results"])
+ masterData := pyraconv.ToInterfaceMap(results[host]) // per-host result envelope (name is misleading: host may also be a replica)
+ masterData = pyraconv.ToInterfaceMap(masterData["data"]) // unwrap the "data" payload
+ hostData := pyraconv.ToInterfaceMap(masterData["db_data"]) // disk-usage entries keyed by mount point
+ return pyraconv.ToInterfaceMap(hostData[point])
+}
+
+func a008ConclusionsRecommendations(data map[string]interface{}) { // A008 (disk usage): emit a conclusion/recommendation per volume above CRITICAL_USAGE
+ preparedData := a008PrepareData(data) // host -> {mount point -> used percent (int)}
+ var conclusions []string
+ var recommendations []string
+
+ for host, hData := range preparedData { // NOTE(review): Go map iteration order is random, so conclusion order varies between runs — confirm whether output order matters
+ hostData := pyraconv.ToInterfaceMap(hData)
+ for point, pPercent := range hostData {
+ pointPercent := int(pyraconv.ToInt64(pPercent))
+ if pointPercent > CRITICAL_USAGE {
+ // generate recommendation
+ pointData := getHostDbpointData(data, host, point) // raw df entry: mount_point, path, ...
+ conclusion := fmt.Sprintf(":warning: Volume `%s` at host `%s` where placed `%s` is filled more than on %d percent", pointData["mount_point"], host, pointData["path"], CRITICAL_USAGE)
+ recommendation := fmt.Sprintf(":warning: Please clean volume `%s` at host `%s` where placed `%s`", pointData["mount_point"], host, pointData["path"])
+ conclusions = append(conclusions, conclusion)
+ recommendations = append(recommendations, recommendation)
+ }
+ }
+ }
+
+ data["conclusions"] = conclusions // exposed to the report templates
+ data["recommendations"] = recommendations
+ saveJsonConclusionsRecommendations(data, conclusions, recommendations) // persist back into the source JSON file
+}
+
+func a008PrepareData(data map[string]interface{}) map[string]interface{} { // flattens disk-usage results into host -> {mount point -> used percent}
+ prepareData := make(map[string]interface{})
+
+ hosts := pyraconv.ToInterfaceMap(data["hosts"])
+ master := pyraconv.ToString(hosts["master"])
+ replicas := pyraconv.ToStringArray(hosts["replicas"])
+
+ results := pyraconv.ToInterfaceMap(data["results"])
+ masterData := pyraconv.ToInterfaceMap(results[master])
+ masterData = pyraconv.ToInterfaceMap(masterData["data"])
+ dbMasterData := pyraconv.ToInterfaceMap(masterData["db_data"])
+ //master
+ preparedMasterData := make(map[string]interface{})
+ for point, pointData := range dbMasterData {
+ if point == "_keys" {
+ continue
+ }
+ pdata := pyraconv.ToInterfaceMap(pointData)
+ usePercent := pyraconv.ToString(pdata["use_percent"])
+ usePercent = strings.Replace(usePercent, "%", "", 1)
+ percent, _ := strconv.Atoi(usePercent)
+ preparedMasterData[point] = percent
+ }
+ prepareData[master] = preparedMasterData
+ // replicas
+ for _, replica := range replicas {
+ replicaData := pyraconv.ToInterfaceMap(results[replica]) // BUG FIX: was results[master] (copy-paste), which attributed the master's disks to every replica
+ replicaData = pyraconv.ToInterfaceMap(replicaData["data"]) // unwrap "data" into the same variable; the original clobbered the outer masterData here
+ dbReplicaData := pyraconv.ToInterfaceMap(replicaData["db_data"]) // BUG FIX: db_data lives under "data" (see master branch above); the original read it one level too high
+ preparedReplicaData := make(map[string]interface{})
+ for point, pointData := range dbReplicaData {
+ if point == "_keys" {
+ continue
+ }
+ pdata := pyraconv.ToInterfaceMap(pointData)
+ usePercent := pyraconv.ToString(pdata["use_percent"])
+ usePercent = strings.Replace(usePercent, "%", "", 1)
+ percent, _ := strconv.Atoi(usePercent)
+ preparedReplicaData[point] = percent
+ }
+ prepareData[replica] = preparedReplicaData
+ }
+ return prepareData
+}
+
+func (g prepare) Prepare(data map[string]interface{}) map[string]interface{} { // plugin entry point invoked by the host binary's loader
+ a008ConclusionsRecommendations(data)
+ return data
+}
+
+var Preparer prepare // exported symbol the plugin loader looks up
diff --git a/pghrep/plugins/F005.go b/pghrep/plugins/F005.go
new file mode 100644
index 0000000000000000000000000000000000000000..afbd5f62f8d969d3831ae63108349b898b15b2d4
--- /dev/null
+++ b/pghrep/plugins/F005.go
@@ -0,0 +1,87 @@
+package main
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "strconv"
+
+ "../src/orderedmap"
+ "../src/pyraconv"
+)
+
+var CRITICAL_BLOAT float64 = 90
+
+var Data map[string]interface{}
+
+type prepare string
+
+func saveJsonConclusionsRecommendations(data map[string]interface{}, conclusions []string, recommendations []string) { // re-reads the source JSON, injects conclusions/recommendations, writes it back in place
+ filePath := pyraconv.ToString(data["source_path_full"])
+ jsonData, err := ioutil.ReadFile(filePath) // just pass the file name
+ if err != nil {
+ return
+ }
+ orderedData := orderedmap.New() // ordered map preserves the original key order of the report JSON
+ if err := json.Unmarshal([]byte(jsonData), &orderedData); err != nil {
+ return
+ } else {
+ orderedData.Set("conclusions", conclusions)
+ orderedData.Set("recommendations", recommendations)
+ resultJson, jerr := orderedData.MarshalJSON()
+ if jerr == nil {
+ var out bytes.Buffer
+ json.Indent(&out, resultJson, "", " ") // pretty-print before overwriting the file
+ jfile, err := os.Create(filePath) // truncates the original report file
+ if err != nil {
+ return
+ }
+ defer jfile.Close()
+ out.WriteTo(jfile)
+ } else {
+ return
+ }
+ }
+}
+
+// f005ConclusionsRecommendations scans the master's "index_bloat" data and records
+// a conclusion/recommendation for every index bloated above CRITICAL_BLOAT percent.
+func f005ConclusionsRecommendations(data map[string]interface{}) {
+ var conclusions []string
+ var recommendations []string
+
+ hosts := pyraconv.ToInterfaceMap(data["hosts"])
+ master := pyraconv.ToString(hosts["master"])
+ results := pyraconv.ToInterfaceMap(data["results"])
+ masterData := pyraconv.ToInterfaceMap(results[master])
+ masterData = pyraconv.ToInterfaceMap(masterData["data"])
+ indexesData := pyraconv.ToInterfaceMap(masterData["index_bloat"])
+ //master
+ for index, indexData := range indexesData {
+ if index == "_keys" {
+ continue
+ }
+ idata := pyraconv.ToInterfaceMap(indexData)
+ bloatPercent := pyraconv.ToString(idata["Bloat ratio"])
+ percent, _ := strconv.ParseFloat(bloatPercent, 64) // BUG FIX: bitSize 32 rounded the value to float32 precision although it is used as float64; also dropped the dead local preparedData the original filled but never read
+ if percent > CRITICAL_BLOAT {
+ conclusion := fmt.Sprintf(":warning: Index `%s` bloated more than `%.0f` percent", idata["Index (Table)"], CRITICAL_BLOAT)
+ recommendation := fmt.Sprintf(":warning: Please check index `%s`", idata["Index (Table)"])
+ conclusions = append(conclusions, conclusion)
+ recommendations = append(recommendations, recommendation)
+ }
+ }
+
+ data["conclusions"] = conclusions
+ data["recommendations"] = recommendations
+ saveJsonConclusionsRecommendations(data, conclusions, recommendations)
+}
+
+func (g prepare) Prepare(data map[string]interface{}) map[string]interface{} { // plugin entry point invoked by the host binary's loader
+ f005ConclusionsRecommendations(data)
+ return data
+}
+
+var Preparer prepare // exported symbol the plugin loader looks up
diff --git a/pghrep/src/main.go b/pghrep/src/main.go
index 4349872b050accd87bd7b579645b5f4e71215b23..cd0511e1928d00f04dd1620d634be9d6ee7f7fe0 100644
--- a/pghrep/src/main.go
+++ b/pghrep/src/main.go
@@ -5,396 +5,401 @@ Postgres Healt Reporter
2018 © Postgres.ai
Perform a generation of Markdown report based on JSON results of postgres-checkup
-Usage:
+Usage:
pghrep --checkdata=file:///path_to_check_results.json --outdir=/home/results
*/
package main
import (
- "fmt"
- "os"
- "flag"
- "strings"
- "encoding/json"
- "io/ioutil"
- "path/filepath"
- "./pyraconv"
- "log"
- "text/template"
- "sort"
- "strconv"
- "./orderedmap"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+ "text/template"
+
+ "./orderedmap"
+ "./pyraconv"
)
var DEBUG bool = false
// Output debug message
func Dbg(v ...interface{}) {
- if DEBUG {
- message := ""
- for _, value := range v {
- message = message + " " + pyraconv.ToString(value)
- }
- log.Println(">>> DEBUG:", message)
- }
+ if DEBUG {
+ message := ""
+ for _, value := range v {
+ message = message + " " + pyraconv.ToString(value)
+ }
+ log.Println(">>> DEBUG:", message)
+ }
}
// Output debug message
func Err(v ...interface{}) {
- message := ""
- for _, value := range v {
- message = message + " " + pyraconv.ToString(value)
- }
- log.Println(">>> ERROR:", message)
+ message := ""
+ for _, value := range v {
+ message = message + " " + pyraconv.ToString(value)
+ }
+ log.Println(">>> ERROR:", message)
}
-
// Prepropess file paths
// Allow absulute and relative (of pwd) paths with or wothout file:// prefix
// Return absoulute path of file
func GetFilePath(name string) string {
- filePath := name
- // remove file:// prefix
- if strings.HasPrefix(strings.ToLower(filePath), "file://") {
- filePath = strings.Replace(filePath, "file://", "", 1)
- }
- if strings.HasPrefix(strings.ToLower(filePath), "/") {
- // absoulute path will use as is
- return filePath
- } else {
- // for relative path will combine with current path
- curDir, err := os.Getwd()
- if err != nil {
- Dbg("Can't determine current path")
- }
- if strings.HasSuffix(strings.ToLower(curDir), "/") {
- filePath = curDir + filePath
- } else {
- filePath = curDir + "/" + filePath
- }
- return filePath
- }
+ filePath := name
+ // remove file:// prefix
+ if strings.HasPrefix(strings.ToLower(filePath), "file://") {
+ filePath = strings.Replace(filePath, "file://", "", 1)
+ }
+ if strings.HasPrefix(strings.ToLower(filePath), "/") {
+ // absoulute path will use as is
+ return filePath
+ } else {
+ // for relative path will combine with current path
+ curDir, err := os.Getwd()
+ if err != nil {
+ Dbg("Can't determine current path")
+ }
+ if strings.HasSuffix(strings.ToLower(curDir), "/") {
+ filePath = curDir + filePath
+ } else {
+ filePath = curDir + "/" + filePath
+ }
+ return filePath
+ }
}
// Check file exists
// Allow absulute and relative (of pwd) paths with or wothout file:// prefix
// Return boolean value
func FileExists(name string) bool {
- filePath := GetFilePath(name)
- if _, err := os.Stat(filePath); err != nil {
- if os.IsNotExist(err) {
- return false
- }
- }
- return true
+ filePath := GetFilePath(name)
+ if _, err := os.Stat(filePath); err != nil {
+ if os.IsNotExist(err) {
+ return false
+ }
+ }
+ return true
}
// Parse json data from string to map
// Return map[string]interface{}
func ParseJson(jsonData string) map[string]interface{} {
- orderedData := orderedmap.New()
- if err := json.Unmarshal([]byte(jsonData), &orderedData); err != nil {
- Err("Can't parse json data:", err)
- return nil
- } else {
- dt := orderedData.ToInterfaceArray()
- return dt
- }
+ orderedData := orderedmap.New()
+ if err := json.Unmarshal([]byte(jsonData), &orderedData); err != nil {
+ Err("Can't parse json data:", err)
+ return nil
+ } else {
+ dt := orderedData.ToInterfaceArray()
+ return dt
+ }
}
// Load json data from file by path
// Return map[string]interface{}
func LoadJsonFile(filePath string) map[string]interface{} {
- if FileExists(filePath) {
- fileContent, err := ioutil.ReadFile(GetFilePath(filePath)) // just pass the file name
- if err != nil {
- Err("Can't read file: ", filePath, err)
- return nil
- }
- return ParseJson(string(fileContent))
- }
- return nil
+ if FileExists(filePath) {
+ fileContent, err := ioutil.ReadFile(GetFilePath(filePath)) // just pass the file name
+ if err != nil {
+ Err("Can't read file: ", filePath, err)
+ return nil
+ }
+ return ParseJson(string(fileContent))
+ }
+ return nil
}
// Load data dependencies
func loadDependencies(data map[string]interface{}) {
- dep := data["dependencies"]
- dependencies := dep.(map[string]interface{})
- for key, value := range dependencies {
- depData := LoadJsonFile(pyraconv.ToString(value))
- dependencies[key] = depData
- }
+ dep := data["dependencies"]
+ dependencies := dep.(map[string]interface{})
+ for key, value := range dependencies {
+ depData := LoadJsonFile(pyraconv.ToString(value))
+ dependencies[key] = depData
+ }
}
// Load report templates from files
func loadTemplates() *template.Template {
- dir, err := filepath.Abs(filepath.Dir(os.Args[0]))
- if err != nil {
- Dbg("Can't determine current path")
- }
-
- var templates *template.Template
- var allFiles []string
- files, err := ioutil.ReadDir(dir + "/../templates")
- if err != nil {
- fmt.Println(err)
- }
- for _, file := range files {
- fileName := file.Name()
- if strings.HasSuffix(fileName, ".tpl") {
- allFiles = append(allFiles, dir + "/../templates/" + fileName)
- }
- }
-
- tplFuncMap := make(template.FuncMap)
- tplFuncMap["Split"] = Split
- tplFuncMap["Trim"] = Trim
- tplFuncMap["Replace"] = Replace
- tplFuncMap["Code"] = Code
- tplFuncMap["Nobr"] = Nobr
- tplFuncMap["Br"] = Br
- tplFuncMap["ByteFormat"] = ByteFormat
- tplFuncMap["UnitValue"] = UnitValue
- tplFuncMap["RawIntUnitValue"] = RawIntUnitValue
- tplFuncMap["RoundUp"] = Round
- tplFuncMap["LimitStr"] = LimitStr
- tplFuncMap["Add"] = Add
- tplFuncMap["Sub"] = Sub
- tplFuncMap["Mul"] = Mul
- tplFuncMap["Div"] = Div
- tplFuncMap["NumFormat"] = NumFormat
- tplFuncMap["MsFormat"] = MsFormat
- tplFuncMap["DtFormat"] = DtFormat
- tplFuncMap["RawIntFormat"] = RawIntFormat
- tplFuncMap["RawFloatFormat"] = RawFloatFormat
- tplFuncMap["Int"] = Int
-
- templates, err = template.New("").Funcs(tplFuncMap).ParseFiles(allFiles...)
- if err != nil {
- log.Fatal("Can't load templates", err)
- return nil
- }
-
- return templates
+ dir, err := filepath.Abs(filepath.Dir(os.Args[0]))
+ if err != nil {
+ Dbg("Can't determine current path")
+ }
+
+ var templates *template.Template
+ var allFiles []string
+ files, err := ioutil.ReadDir(dir + "/../templates")
+ if err != nil {
+ fmt.Println(err)
+ }
+ for _, file := range files {
+ fileName := file.Name()
+ if strings.HasSuffix(fileName, ".tpl") {
+ allFiles = append(allFiles, dir+"/../templates/"+fileName)
+ }
+ }
+
+ tplFuncMap := make(template.FuncMap)
+ tplFuncMap["Split"] = Split
+ tplFuncMap["Trim"] = Trim
+ tplFuncMap["Replace"] = Replace
+ tplFuncMap["Code"] = Code
+ tplFuncMap["Nobr"] = Nobr
+ tplFuncMap["Br"] = Br
+ tplFuncMap["ByteFormat"] = ByteFormat
+ tplFuncMap["UnitValue"] = UnitValue
+ tplFuncMap["RawIntUnitValue"] = RawIntUnitValue
+ tplFuncMap["RoundUp"] = Round
+ tplFuncMap["LimitStr"] = LimitStr
+ tplFuncMap["Add"] = Add
+ tplFuncMap["Sub"] = Sub
+ tplFuncMap["Mul"] = Mul
+ tplFuncMap["Div"] = Div
+ tplFuncMap["NumFormat"] = NumFormat
+ tplFuncMap["MsFormat"] = MsFormat
+ tplFuncMap["DtFormat"] = DtFormat
+ tplFuncMap["RawIntFormat"] = RawIntFormat
+ tplFuncMap["RawFloatFormat"] = RawFloatFormat
+ tplFuncMap["Int"] = Int
+
+ templates, err = template.New("").Funcs(tplFuncMap).ParseFiles(allFiles...)
+ if err != nil {
+ log.Fatal("Can't load templates", err)
+ return nil
+ }
+
+ return templates
}
// Prepare raw json data for every host
func getRawData(data map[string]interface{}) {
- // for every host get data
- var rawData []interface{}
- hosts := pyraconv.ToInterfaceMap(data["hosts"])
- Dbg("Data hosts: ", hosts)
- results := pyraconv.ToInterfaceMap(data["results"])
- masterName := pyraconv.ToString(hosts["master"])
- masterResults := pyraconv.ToInterfaceMap(results[masterName])
- masterData := pyraconv.ToInterfaceMap(masterResults["data"])
- masterJson, err := json.Marshal(masterData)
+ // for every host get data
+ var rawData []interface{}
+ hosts := pyraconv.ToInterfaceMap(data["hosts"])
+ Dbg("Data hosts: ", hosts)
+ results := pyraconv.ToInterfaceMap(data["results"])
+ masterName := pyraconv.ToString(hosts["master"])
+ masterResults := pyraconv.ToInterfaceMap(results[masterName])
+ masterData := pyraconv.ToInterfaceMap(masterResults["data"])
+ masterJson, err := json.Marshal(masterData)
if err == nil {
- masterItem := make(map[string]interface{})
- masterItem["host"] = masterName
- masterItem["data"] = string(masterJson)
- rawData = append(rawData, masterItem)
- }
- replicas := pyraconv.ToStringArray(hosts["replicas"])
- for _, host := range replicas {
- hostResults := pyraconv.ToInterfaceMap(results[host])
- hostData := pyraconv.ToInterfaceMap(hostResults["data"])
- hostJson, err := json.Marshal(hostData)
- if err == nil {
- hostItem := make(map[string]interface{})
- hostItem["host"] = host
- hostItem["data"] = string(hostJson)
- rawData = append(rawData, hostItem)
- }
- }
- data["rawData"] = rawData
+ masterItem := make(map[string]interface{})
+ masterItem["host"] = masterName
+ masterItem["data"] = string(masterJson)
+ rawData = append(rawData, masterItem)
+ }
+ replicas := pyraconv.ToStringArray(hosts["replicas"])
+ for _, host := range replicas {
+ hostResults := pyraconv.ToInterfaceMap(results[host])
+ hostData := pyraconv.ToInterfaceMap(hostResults["data"])
+ hostJson, err := json.Marshal(hostData)
+ if err == nil {
+ hostItem := make(map[string]interface{})
+ hostItem["host"] = host
+ hostItem["data"] = string(hostJson)
+ rawData = append(rawData, hostItem)
+ }
+ }
+ data["rawData"] = rawData
}
/*
Generate MD reports by given check Id
CheckId can be either ID of concrete check (e.g. H003) or represent the whole category (e.g. K000)
*/
-func generateMdReports(checkId string, reportData map[string]interface{}, outputDir string) bool{
- category := checkId[0:1]
- checkNum, err := strconv.ParseInt(checkId[1:4], 10, 64)
-
- reportPrefix := ""
- if checkNum != 0 {
- reportPrefix = checkId // specified check given
- } else {
- reportPrefix = category // category given
- }
-
- dir, _ := filepath.Abs(filepath.Dir(os.Args[0]))
- if err != nil {
- Err(err)
- return false
- }
- files, err := ioutil.ReadDir(dir + "/../templates")
- if err != nil {
- Err(err)
- return false
- }
- for _, file := range files {
- fileName := file.Name()
- if strings.HasPrefix(fileName, reportPrefix) && strings.HasSuffix(fileName, ".tpl") {
- curCheckId := fileName[0:4]
- outputFileName := strings.Replace(fileName, ".tpl", ".md", -1)
- reportData["checkId"] = curCheckId
- if !generateMdReport(curCheckId, outputFileName, reportData, outputDir) {
- Err("Can't generate report " + outputFileName + " based on " + checkId + " json data")
- return false
- }
- }
- }
-
- return true
+func generateMdReports(checkId string, reportData map[string]interface{}, outputDir string) bool {
+ category := checkId[0:1]
+ checkNum, err := strconv.ParseInt(checkId[1:4], 10, 64)
+
+ reportPrefix := ""
+ if checkNum != 0 {
+ reportPrefix = checkId // specified check given
+ } else {
+ reportPrefix = category // category given
+ }
+
+ dir, _ := filepath.Abs(filepath.Dir(os.Args[0]))
+ if err != nil {
+ Err(err)
+ return false
+ }
+ files, err := ioutil.ReadDir(dir + "/../templates")
+ if err != nil {
+ Err(err)
+ return false
+ }
+ for _, file := range files {
+ fileName := file.Name()
+ if strings.HasPrefix(fileName, reportPrefix) && strings.HasSuffix(fileName, ".tpl") {
+ curCheckId := fileName[0:4]
+ outputFileName := strings.Replace(fileName, ".tpl", ".md", -1)
+ reportData["checkId"] = curCheckId
+ if !generateMdReport(curCheckId, outputFileName, reportData, outputDir) {
+ Err("Can't generate report " + outputFileName + " based on " + checkId + " json data")
+ return false
+ }
+ }
+ }
+
+ return true
}
// Generate md report (file) on base of reportData and save them to file in outputDir
-func generateMdReport(checkId string, reportFilename string, reportData map[string]interface{}, outputDir string) bool{
- var outputFileName string
- if len(reportFilename) > 0 {
- outputFileName = reportFilename
- } else {
- outputFileName = checkId + ".md"
- }
- if strings.HasSuffix(strings.ToLower(outputDir), "/") {
- outputFileName = outputDir + outputFileName
- } else {
- outputFileName = outputDir + "/" + outputFileName
- }
- _, err := filepath.Abs(filepath.Dir(os.Args[0]))
- f, err := os.OpenFile(outputFileName, os.O_CREATE | os.O_RDWR, 0777)
- if err != nil {
- Err("Can't create report file", err)
- return false
- }
- defer f.Close()
- f.Truncate(0)
-
- templates := loadTemplates()
- if templates == nil {
- log.Fatal("Can't load template")
- }
- reportFileName := checkId + ".tpl"
- reporTpl := templates.Lookup(reportFileName)
- data := reportData
- if reporTpl == nil {
- Err("Template " + checkId + ".tpl not found.")
- getRawData(data)
- reportFileName = "raw.tpl"
- reporTpl = templates.Lookup(reportFileName)
- }
- err = reporTpl.ExecuteTemplate(f, reportFileName, data)
- if err != nil {
- Err("Template execute error is", err)
- defer os.Remove(outputFileName)
- return false
- } else {
- return true
- }
+func generateMdReport(checkId string, reportFilename string, reportData map[string]interface{}, outputDir string) bool {
+ var outputFileName string
+ if len(reportFilename) > 0 {
+ outputFileName = reportFilename
+ } else {
+ outputFileName = checkId + ".md"
+ }
+ if strings.HasSuffix(strings.ToLower(outputDir), "/") {
+ outputFileName = outputDir + outputFileName
+ } else {
+ outputFileName = outputDir + "/" + outputFileName
+ }
+ _, err := filepath.Abs(filepath.Dir(os.Args[0]))
+ f, err := os.OpenFile(outputFileName, os.O_CREATE|os.O_RDWR, 0777)
+ if err != nil {
+ Err("Can't create report file", err)
+ return false
+ }
+ defer f.Close()
+ f.Truncate(0)
+
+ templates := loadTemplates()
+ if templates == nil {
+ log.Fatal("Can't load template")
+ }
+ reportFileName := checkId + ".tpl"
+ reporTpl := templates.Lookup(reportFileName)
+ data := reportData
+ if reporTpl == nil {
+ Err("Template " + checkId + ".tpl not found.")
+ getRawData(data)
+ reportFileName = "raw.tpl"
+ reporTpl = templates.Lookup(reportFileName)
+ }
+ err = reporTpl.ExecuteTemplate(f, reportFileName, data)
+ if err != nil {
+ Err("Template execute error is", err)
+ defer os.Remove(outputFileName)
+ return false
+ } else {
+ return true
+ }
}
// Sort hosts on master and replicas by role and index.
// Return map {"master":name string, "replicas":[replica1 string, replica2 string]}
func determineMasterReplica(data map[string]interface{}) {
- hostRoles := make(map[string]interface{})
- var sortedReplicas []string
- replicas := make(map[int]string)
- nodes_json := pyraconv.ToInterfaceMap(data["last_nodes_json"])
- hosts := pyraconv.ToInterfaceMap(nodes_json["hosts"]);
- hostRoles["master"] = nil
- for host, value := range hosts {
- hostData := pyraconv.ToInterfaceMap(value)
- if hostData["role"] == "master" {
- hostRoles["master"] = host
- } else {
- if host != "_keys" {
- index, _ := strconv.Atoi(pyraconv.ToString(hostData["index"]))
- replicas[index] = host
- }
- }
- }
- var keys []int
- for k := range replicas {
- keys = append(keys, k)
- }
- sort.Ints(keys)
- for _, k := range keys {
- sortedReplicas = append(sortedReplicas, replicas[k])
- }
-
- hostRoles["replicas"] = sortedReplicas
- data["hosts"] = hostRoles
+ hostRoles := make(map[string]interface{})
+ var sortedReplicas []string
+ replicas := make(map[int]string)
+ nodes_json := pyraconv.ToInterfaceMap(data["last_nodes_json"])
+ hosts := pyraconv.ToInterfaceMap(nodes_json["hosts"])
+ hostRoles["master"] = nil
+ for host, value := range hosts {
+ hostData := pyraconv.ToInterfaceMap(value)
+ if hostData["role"] == "master" {
+ hostRoles["master"] = host
+ } else {
+ if host != "_keys" {
+ index, _ := strconv.Atoi(pyraconv.ToString(hostData["index"]))
+ replicas[index] = host
+ }
+ }
+ }
+ var keys []int
+ for k := range replicas {
+ keys = append(keys, k)
+ }
+ sort.Ints(keys)
+ for _, k := range keys {
+ sortedReplicas = append(sortedReplicas, replicas[k])
+ }
+
+ hostRoles["replicas"] = sortedReplicas
+ data["hosts"] = hostRoles
}
func main() {
- // get input data checkId, checkData
- var checkId string
- var checkData string
- var resultData map[string]interface{}
- checkDataPtr := flag.String("checkdata", "", "an filepath to json report")
- outDirPtr := flag.String("outdir", "", "an directory where report need save")
- debugPtr := flag.Int("debug", 0, "enable debug mode (must be defined 1 or 0 (default))")
- flag.Parse()
- checkData=*checkDataPtr
-
- if *debugPtr == 1 {
- DEBUG = true
- }
-
- if FileExists(checkData) {
- resultData = LoadJsonFile(checkData)
- if resultData == nil {
- log.Fatal("ERROR: File given by --checkdata content wrong json data.")
- return
- }
- resultData["source_path_full"] = checkData
- resultData["source_path_parts"] = strings.Split(checkData, string(os.PathSeparator))
- } else {
- log.Println("ERROR: File given by --checkdata not found")
- return
- }
-
- if resultData != nil {
- checkId = pyraconv.ToString(resultData["checkId"])
- } else {
- log.Fatal("ERROR: Content given by --checkdata is wrong json content.")
- }
-
- checkId = strings.ToUpper(checkId)
- loadDependencies(resultData)
- determineMasterReplica(resultData)
-
- l, err := newLoader()
- if err != nil {
- fmt.Fprintf(os.Stderr, "%v", err)
- }
- defer l.destroy()
-
- var reportData map[string]interface{}
- objectPath, err := l.get(checkId);
- if err != nil {
- Dbg("Cannot find and load plugin.", err)
- reportData = resultData
- } else {
- result, err := l.call(objectPath, resultData)
- if err != nil {
- fmt.Fprintf(os.Stderr, "%v", err)
- }
- bodyBytes, _ := json.Marshal(result)
- json.Unmarshal(bodyBytes, &reportData)
- }
-
- var outputDir string
- if len(*outDirPtr) == 0 {
- outputDir = "./"
- } else {
- outputDir = *outDirPtr
- }
- reportDone := generateMdReports(checkId, reportData, outputDir)
- if ! reportDone {
- log.Fatal("Cannot generate report. Data file or template is wrong.")
- }
+ // get input data checkId, checkData
+ var checkId string
+ var checkData string
+ var resultData map[string]interface{}
+ checkDataPtr := flag.String("checkdata", "", "an filepath to json report")
+ outDirPtr := flag.String("outdir", "", "an directory where report need save")
+ debugPtr := flag.Int("debug", 0, "enable debug mode (must be defined 1 or 0 (default))")
+ flag.Parse()
+ checkData = *checkDataPtr
+
+ if *debugPtr == 1 {
+ DEBUG = true
+ }
+
+ if FileExists(checkData) {
+ resultData = LoadJsonFile(checkData)
+ if resultData == nil {
+ log.Fatal("ERROR: File given by --checkdata content wrong json data.")
+ return
+ }
+ resultData["source_path_full"] = checkData
+ resultData["source_path_parts"] = strings.Split(checkData, string(os.PathSeparator))
+ } else {
+ log.Println("ERROR: File given by --checkdata not found")
+ return
+ }
+
+ if resultData != nil {
+ checkId = pyraconv.ToString(resultData["checkId"])
+ } else {
+ log.Fatal("ERROR: Content given by --checkdata is wrong json content.")
+ }
+
+ checkId = strings.ToUpper(checkId)
+ loadDependencies(resultData)
+ determineMasterReplica(resultData)
+
+ l, err := newLoader()
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "%v", err)
+ }
+ defer l.destroy()
+
+ var reportData map[string]interface{}
+ objectPath, err := l.get(checkId)
+ if err != nil {
+ Dbg("Cannot find and load plugin.", err)
+ reportData = resultData
+ } else {
+ result, err := l.call(objectPath, resultData)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "%v", err)
+ }
+ bodyBytes, _ := json.Marshal(result)
+ json.Unmarshal(bodyBytes, &reportData)
+ hosts := pyraconv.ToInterfaceMap(reportData["hosts"])
+ if hosts["replicas"] == nil {
+ // make empty replicas list, because unmarshal above remove empty arrays
+ hosts["replicas"] = make(map[string]interface{})
+ }
+ }
+
+ var outputDir string
+ if len(*outDirPtr) == 0 {
+ outputDir = "./"
+ } else {
+ outputDir = *outDirPtr
+ }
+ reportDone := generateMdReports(checkId, reportData, outputDir)
+ if !reportDone {
+ log.Fatal("Cannot generate report. Data file or template is wrong.")
+ }
}
diff --git a/pghrep/templates/A008.tpl b/pghrep/templates/A008.tpl
index f5dc3ceac8bc80e56bf323805f23dba283d69ccc..65f87f60cd8ca26be1f2138d98f4a9d9d785aef0 100644
--- a/pghrep/templates/A008.tpl
+++ b/pghrep/templates/A008.tpl
@@ -82,4 +82,18 @@ Name | FS Type | Size | Available | Usage | Used | Mount Point | Path | Device
## Conclusions ##
+{{- if .conclusions }}
+{{ range $conclusion := .conclusions -}}
+{{ $conclusion }}
+{{ end }}
+{{ end }}
+
## Recommendations ##
+
+{{- if .recommendations }}
+{{ range $recommendation := .recommendations -}}
+{{ $recommendation }}
+{{ end }}
+{{ end }}
+
+
diff --git a/pghrep/templates/F002.tpl b/pghrep/templates/F002.tpl
index c3907280c9d31ca2c611ba78d61566f932aa6348..85b2c20c33dc968ee93c477ee2bd2884d2f9c0f5 100644
--- a/pghrep/templates/F002.tpl
+++ b/pghrep/templates/F002.tpl
@@ -26,7 +26,7 @@ Current database: {{ .database }}
----------|-----|------------------|---------|-----------------|--------------------
{{ range $i, $key := (index (index (index (index .results .hosts.master) "data") "per_database") "_keys") }}
{{- $value := (index (index (index (index $.results $.hosts.master) "data") "per_database") $key) -}}
-{{ index $value "relation"}}{{if $value.overrided_settings}}*{{ end }} |
+{{ index $value "relation"}}{{if $value.overrided_settings}} **\***{{ end }} |
{{- NumFormat (index $value "age") -1 }} |
{{- index $value "capacity_used"}} |
{{- if (index $value "warning") }} ⚠ {{ else }} {{ end }} |
@@ -35,7 +35,7 @@ Current database: {{ .database }}
{{ end }}{{/* range */}}
{{/*- end -*/}}{{/* if per_instance exists */}}
{{- if gt (Int (index (index (index .results .hosts.master) "data") "overrided_settings_count")) 0 }}
-* This table has specific autovacuum settings. See 'F001 Autovacuum: Current settings'
+**\*** This table has specific autovacuum settings. See 'F001 Autovacuum: Current settings'
{{- end }}
{{- else -}}{{/*Master data*/}}
No data
diff --git a/pghrep/templates/F003.tpl b/pghrep/templates/F003.tpl
index a5b8f3d7b3d4db04f9058df31728cccf76a8bc79..7de9aeb6b4526ea7e2c623dc53319508c45835d6 100644
--- a/pghrep/templates/F003.tpl
+++ b/pghrep/templates/F003.tpl
@@ -14,7 +14,7 @@ Stats reset: {{ (index (index (index .results .hosts.master) "data") "database_s
----------|------|-----------------------|-------------------|----------|---------|-----------|-----------|-----------|--------------------|------------|------------|-----------
{{ range $i, $key := (index (index (index (index .results .hosts.master) "data") "dead_tuples") "_keys") }}
{{- $value := (index (index (index (index $.results $.hosts.master) "data") "dead_tuples") $key) -}}
-{{ index $value "relation"}}{{if $value.overrided_settings}}*{{ end }} |
+{{ index $value "relation"}}{{if $value.overrided_settings}} **\***{{ end }} |
{{- index $value "relkind"}} |
{{- index $value "since_last_autovacuum"}} |
{{- index $value "since_last_vacuum"}} |
@@ -29,7 +29,7 @@ Stats reset: {{ (index (index (index .results .hosts.master) "data") "database_s
{{- if ge (Int (index $value "dead_ratio")) 10 }} **{{ (index $value "dead_ratio")}}** {{else}} {{ (index $value "dead_ratio")}} {{end}}
{{ end }}
{{- if gt (Int (index (index (index .results .hosts.master) "data") "overrided_settings_count")) 0 }}
-* This table has specific autovacuum settings. See 'F001 Autovacuum: Current settings'
+**\*** This table has specific autovacuum settings. See 'F001 Autovacuum: Current settings'
{{- end }}
{{- else -}}{{/* dead_tuples */}}
No data
diff --git a/pghrep/templates/F004.tpl b/pghrep/templates/F004.tpl
index ade50422f0d84f1f04dc879d5da13a594e146d5b..b2d0c9bbe2914d430dc659d89fd272446d63fcab 100644
--- a/pghrep/templates/F004.tpl
+++ b/pghrep/templates/F004.tpl
@@ -19,7 +19,7 @@ Current database: {{ .database }}
{{- if (index (index (index (index $.results $.hosts.master) "data") "heap_bloat_total") "Bloat ratio") }}{{- if ge (Int (index (index (index (index $.results $.hosts.master) "data") "heap_bloat_total") "Bloat ratio" )) $minRatioWarning }}**{{- RawFloatFormat (index (index (index (index $.results $.hosts.master) "data") "heap_bloat_total") "Bloat ratio" ) 2 }}**{{else}}{{- RawFloatFormat (index (index (index (index $.results $.hosts.master) "data") "heap_bloat_total") "Bloat ratio") 2 }}{{ end }}|||{{ end }}{{ end }}
{{ range $i, $key := (index (index (index (index .results .hosts.master) "data") "heap_bloat") "_keys") }}
{{- $value := (index (index (index (index $.results $.hosts.master) "data") "heap_bloat") $key ) -}}
-{{ $key }}{{if $value.overrided_settings}}*{{ end }} |
+{{ $key }}{{if $value.overrided_settings}} **\***{{ end }} |
{{- ByteFormat ( index $value "Real size bytes" ) 2 }} |
{{- "~" }}{{ ByteFormat ( index $value "Extra size bytes" ) 2 }} ({{- NumFormat ( index $value "Extra_ratio" ) 2 }}%)|
{{- if ( index $value "Bloat size bytes")}}{{ ByteFormat ( index $value "Bloat size bytes") 2 }}{{end}} |
@@ -30,7 +30,7 @@ Current database: {{ .database }}
{{- ( index $value "Fillfactor") }}
{{ end }} {{/*range*/}}
{{- if gt (Int (index (index (index .results .hosts.master) "data") "overrided_settings_count")) 0 }}
-* This table has specific autovacuum settings. See 'F001 Autovacuum: Current settings'
+**\*** This table has specific autovacuum settings. See 'F001 Autovacuum: Current settings'
{{- end }}
{{- else }}{{/* if heap_bloat */}}
No data
diff --git a/pghrep/templates/F005.tpl b/pghrep/templates/F005.tpl
index 75f75bd16b4ddc1b5433940cce390ef72fa526d9..65a638f4b29494ac93c224c1a348354204b29d23 100644
--- a/pghrep/templates/F005.tpl
+++ b/pghrep/templates/F005.tpl
@@ -17,8 +17,7 @@ Current database: {{ .database }}
{{- if ge (Int (index (index (index (index $.results $.hosts.master) "data") "index_bloat_total") "Bloat ratio" )) $minRatioWarning }}**{{- RawFloatFormat (index (index (index (index $.results $.hosts.master) "data") "index_bloat_total") "Bloat ratio" ) 2 }}**{{else}}{{- RawFloatFormat (index (index (index (index $.results $.hosts.master) "data") "index_bloat_total") "Bloat ratio" ) 2 }}{{end}}||
{{ range $i, $key := (index (index (index (index .results .hosts.master) "data") "index_bloat") "_keys") }}
{{- $value := (index (index (index (index $.results $.hosts.master) "data") "index_bloat") $key) -}}
-{{- $tableIndex := Split $key "\n" -}}
-{{ $table := Trim (index $tableIndex 1) " ()"}}{{ (index $tableIndex 0) }} ({{ $table }}{{if $value.overrided_settings}}*{{ end }}) |
+{{ $value.index_name }} ({{ $value.table_name }}{{if $value.overrided_settings}} **\***{{ end }}) |
{{- ByteFormat ( index $value "Real size bytes") 2 }} |
{{- if ( index $value "Extra size bytes")}}{{- "~" }}{{ ByteFormat ( index $value "Extra size bytes" ) 2 }} ({{- NumFormat ( index $value "Extra_ratio" ) 2 }}%){{end}} |
{{- if ( index $value "Bloat size bytes")}}{{ ByteFormat ( index $value "Bloat size bytes") 2 }}{{end}} |
@@ -28,7 +27,7 @@ Current database: {{ .database }}
{{- ( index $value "fillfactor") }}
{{ end }}
{{- if gt (Int (index (index (index .results .hosts.master) "data") "overrided_settings_count")) 0 }}
-* This table has specific autovacuum settings. See 'F001 Autovacuum: Current settings'
+**\*** This table has specific autovacuum settings. See 'F001 Autovacuum: Current settings'
{{- end }}
{{- else -}}{{/*Master data*/}}
No data
@@ -39,6 +38,18 @@ No data
## Conclusions ##
+{{- if .conclusions }}
+{{ range $conclusion := .conclusions -}}
+{{ $conclusion }}
+{{ end }}
+{{ end }}
## Recommendations ##
+{{- if .recommendations }}
+{{ range $recommendation := .recommendations -}}
+{{ $recommendation }}
+{{ end }}
+{{ end }}
+
+
diff --git a/resources/checks/A004_cluster_info.sh b/resources/checks/A004_cluster_info.sh
index b79121721ab2ffa886bb4bc2d86b71462c32fb3c..dbf5554fced347135374302880fe5d60e5fafbdc 100755
--- a/resources/checks/A004_cluster_info.sh
+++ b/resources/checks/A004_cluster_info.sh
@@ -120,7 +120,7 @@ with data as (
'Temp Files: total number of files per day',
case
when (((extract(epoch from now()) - extract(epoch from data.stats_reset))/86400)::int) <> 0 then
- (temp_files / (((extract(epoch from now()) - extract(epoch from data.stats_reset))/86400)::int))::text
+ round((temp_files / (((extract(epoch from now()) - extract(epoch from data.stats_reset))/86400)))::numeric, 2)::text
else
temp_files::text
end
@@ -134,7 +134,7 @@ with data as (
'Deadlocks per day',
case
when ((extract(epoch from now()) - extract(epoch from data.stats_reset))/86400)::int <> 0 then
- (deadlocks / (((extract(epoch from now()) - extract(epoch from data.stats_reset))/86400)::int))::text
+ round((deadlocks / (((extract(epoch from now()) - extract(epoch from data.stats_reset))/86400)))::numeric, 2)::text
else
deadlocks::text
end
diff --git a/resources/checks/F005_index_bloat.sh b/resources/checks/F005_index_bloat.sh
index 953eace846f4a59a0ae04e6ad047b0a5e036217d..2d4eb3ee5704173947afbdc7fb49839a4ff43188 100755
--- a/resources/checks/F005_index_bloat.sh
+++ b/resources/checks/F005_index_bloat.sh
@@ -1,109 +1,111 @@
+#CHECK_HOST_CMD="sh -c"
+#_PSQL="psql -U postila_ru -t -0 "
${CHECK_HOST_CMD} "${_PSQL} -f -" < 0 as is_na
- from pg_attribute as a
- join (
- select
- tbl.oid tblid, nspname, tbl.relname AS tblname, idx.relname AS idxname, idx.reltuples, idx.relpages, idx.relam,
- indrelid, indexrelid, indkey::smallint[] AS attnum,
- coalesce(substring(array_to_string(idx.reloptions, ' ') from 'fillfactor=([0-9]+)')::smallint, 90) as fillfactor
- from pg_index
- join pg_class idx on idx.oid = pg_index.indexrelid
- join pg_class tbl on tbl.oid = pg_index.indrelid
- join pg_namespace on pg_namespace.oid = idx.relnamespace
- where pg_index.indisvalid AND tbl.relkind = 'r' AND idx.relpages > 0
- ) as i on a.attrelid = i.indexrelid
- join pg_stats as s on
- s.schemaname = i.nspname
- and (
- (s.tablename = i.tblname and s.attname = pg_catalog.pg_get_indexdef(a.attrelid, a.attnum, true)) -- stats from tbl
- OR (s.tablename = i.idxname AND s.attname = a.attname) -- stats from functionnal cols
- )
- join pg_type as t on a.atttypid = t.oid
- where a.attnum > 0
- group by 1, 2, 3, 4, 5, 6, 7, 8, 9, 10
- ), step2 as (
- select
- *,
- (
- index_tuple_hdr_bm + maxalign
- -- Add padding to the index tuple header to align on MAXALIGN
- - case when index_tuple_hdr_bm % maxalign = 0 THEN maxalign else index_tuple_hdr_bm % maxalign end
- + nulldatawidth + maxalign
- -- Add padding to the data to align on MAXALIGN
- - case
- when nulldatawidth = 0 then 0
- when nulldatawidth::integer % maxalign = 0 then maxalign
- else nulldatawidth::integer % maxalign
- end
- )::numeric as nulldatahdrwidth
- -- , index_tuple_hdr_bm, nulldatawidth -- (DEBUG INFO)
- from step1
- ), step3 as (
- select
- *,
- -- ItemIdData size + computed avg size of a tuple (nulldatahdrwidth)
- coalesce(1 + ceil(reltuples / floor((bs - pageopqdata - pagehdr) / (4 + nulldatahdrwidth)::float)), 0) as est_pages,
- coalesce(1 + ceil(reltuples / floor((bs - pageopqdata - pagehdr) * fillfactor / (100 * (4 + nulldatahdrwidth)::float))), 0) as est_pages_ff
- -- , stattuple.pgstatindex(quote_ident(nspname)||'.'||quote_ident(idxname)) AS pst, index_tuple_hdr_bm, maxalign, pagehdr, nulldatawidth, nulldatahdrwidth, reltuples -- (DEBUG INFO)
- from step2
- join pg_am am on step2.relam = am.oid
- where am.amname = 'btree'
- ), step4 as (
- SELECT
- *,
- bs*(relpages)::bigint AS real_size,
- -------current_database(), nspname AS schemaname, tblname, idxname, bs*(relpages)::bigint AS real_size,
- bs*(relpages-est_pages)::bigint AS extra_size,
- 100 * (relpages-est_pages)::float / relpages AS extra_ratio,
- bs*(relpages-est_pages_ff) AS bloat_size,
- 100 * (relpages-est_pages_ff)::float / relpages AS bloat_ratio
- -- , 100-(sub.pst).avg_leaf_density, est_pages, index_tuple_hdr_bm, maxalign, pagehdr, nulldatawidth, nulldatahdrwidth, sub.reltuples, sub.relpages -- (DEBUG INFO)
- from step3
- -- WHERE NOT is_na
+with overrided_tables as (
+select
+ pc.oid as table_id,
+ pn.nspname as scheme_name,
+ pc.relname as table_name,
+ pc.reloptions as options
+from pg_class pc
+join pg_namespace pn on pn.oid = pc.relnamespace
+where reloptions::text ~ 'autovacuum'
+), step1 as (
+select
+ i.tblid,
+ i.nspname as schema_name,
+ i.tblname as table_name,
+ i.idxname as index_name,
+ i.reltuples,
+ i.relpages,
+ i.relam,
+ a.attrelid AS table_oid,
+ current_setting('block_size')::numeric AS bs,
+ fillfactor,
+ -- MAXALIGN: 4 on 32bits, 8 on 64bits (and mingw32 ?)
+ case when version() ~ 'mingw32|64-bit|x86_64|ppc64|ia64|amd64' then 8 else 4 end as maxalign,
+ /* per page header, fixed size: 20 for 7.X, 24 for others */
+ 24 AS pagehdr,
+ /* per page btree opaque data */
+ 16 AS pageopqdata,
+ /* per tuple header: add IndexAttributeBitMapData if some cols are null-able */
+ case
+ when max(coalesce(s.null_frac,0)) = 0 then 2 -- IndexTupleData size
+ else 2 + (( 32 + 8 - 1 ) / 8) -- IndexTupleData size + IndexAttributeBitMapData size ( max num filed per index + 8 - 1 /8)
+ end as index_tuple_hdr_bm,
+ /* data len: we remove null values save space using it fractionnal part from stats */
+ sum((1 - coalesce(s.null_frac, 0)) * coalesce(s.avg_width, 1024)) as nulldatawidth,
+ max(case when a.atttypid = 'pg_catalog.name'::regtype then 1 else 0 end) > 0 as is_na
+from pg_attribute as a
+join (
+ select
+ tbl.oid tblid, nspname, tbl.relname AS tblname, idx.relname AS idxname, idx.reltuples, idx.relpages, idx.relam,
+ indrelid, indexrelid, indkey::smallint[] AS attnum,
+ coalesce(substring(array_to_string(idx.reloptions, ' ') from 'fillfactor=([0-9]+)')::smallint, 90) as fillfactor
+ from pg_index
+ join pg_class idx on idx.oid = pg_index.indexrelid
+ join pg_class tbl on tbl.oid = pg_index.indrelid
+ join pg_namespace on pg_namespace.oid = idx.relnamespace
+ where pg_index.indisvalid AND tbl.relkind = 'r' AND idx.relpages > 0
+) as i on a.attrelid = i.indexrelid
+join pg_stats as s on
+ s.schemaname = i.nspname
+ and (
+ (s.tablename = i.tblname and s.attname = pg_catalog.pg_get_indexdef(a.attrelid, a.attnum, true)) -- stats from tbl
+ OR (s.tablename = i.idxname AND s.attname = a.attname) -- stats from functionnal cols
)
+join pg_type as t on a.atttypid = t.oid
+where a.attnum > 0
+group by 1, 2, 3, 4, 5, 6, 7, 8, 9, 10
+), step2 as (
+select
+ *,
+ (
+ index_tuple_hdr_bm + maxalign
+ -- Add padding to the index tuple header to align on MAXALIGN
+ - case when index_tuple_hdr_bm % maxalign = 0 THEN maxalign else index_tuple_hdr_bm % maxalign end
+ + nulldatawidth + maxalign
+ -- Add padding to the data to align on MAXALIGN
+ - case
+ when nulldatawidth = 0 then 0
+ when nulldatawidth::integer % maxalign = 0 then maxalign
+ else nulldatawidth::integer % maxalign
+ end
+ )::numeric as nulldatahdrwidth
+ -- , index_tuple_hdr_bm, nulldatawidth -- (DEBUG INFO)
+from step1
+), step3 as (
+select
+ *,
+ -- ItemIdData size + computed avg size of a tuple (nulldatahdrwidth)
+ coalesce(1 + ceil(reltuples / floor((bs - pageopqdata - pagehdr) / (4 + nulldatahdrwidth)::float)), 0) as est_pages,
+ coalesce(1 + ceil(reltuples / floor((bs - pageopqdata - pagehdr) * fillfactor / (100 * (4 + nulldatahdrwidth)::float))), 0) as est_pages_ff
+ -- , stattuple.pgstatindex(quote_ident(nspname)||'.'||quote_ident(idxname)) AS pst, index_tuple_hdr_bm, maxalign, pagehdr, nulldatawidth, nulldatahdrwidth, reltuples -- (DEBUG INFO)
+from step2
+join pg_am am on step2.relam = am.oid
+where am.amname = 'btree'
+), step4 as (
+SELECT
+ *,
+ bs*(relpages)::bigint AS real_size,
+-------current_database(), nspname AS schemaname, tblname, idxname, bs*(relpages)::bigint AS real_size,
+ bs*(relpages-est_pages)::bigint AS extra_size,
+ 100 * (relpages-est_pages)::float / relpages AS extra_ratio,
+ bs*(relpages-est_pages_ff) AS bloat_size,
+ 100 * (relpages-est_pages_ff)::float / relpages AS bloat_ratio
+ -- , 100-(sub.pst).avg_leaf_density, est_pages, index_tuple_hdr_bm, maxalign, pagehdr, nulldatawidth, nulldatahdrwidth, sub.reltuples, sub.relpages -- (DEBUG INFO)
+from step3
+-- WHERE NOT is_na
+), index_data as (
select
case is_na when true then 'TRUE' else '' end as "Is N/A",
format(
- \$out$%s
- (%s)\$out$,
+ \$out$%s (%s)\$out$,
left(index_name, 50) || case when length(index_name) > 50 then '…' else '' end,
coalesce(nullif(step4.schema_name, 'public') || '.', '') || step4.table_name
) as "Index (Table)",
+ quote_ident(index_name) as index_name,
+ coalesce(nullif(quote_ident(step4.schema_name), 'public') || '.', '') || quote_ident(step4.table_name) as table_name,
real_size as "Real size bytes",
pg_size_pretty(real_size::numeric) as "Size",
extra_ratio as "Extra ratio",
@@ -148,7 +150,7 @@ with data as (
left join overrided_tables ot on ot.table_id = step4.tblid
order by real_size desc nulls last
), limited_data as (
- select * from data limit 100
+ select * from index_data limit 100
), limited_json_data as (
select json_object_agg(ld."Index (Table)", ld) as json from limited_data ld
), total_data as (
@@ -159,7 +161,7 @@ with data as (
sum("Bloat size bytes") as "Bloat size bytes sum",
(sum("Bloat size bytes")::numeric/sum("Real size bytes")::numeric * 100) as "Bloat ratio",
sum("Extra size bytes") as "Extra size bytes sum"
- from data
+ from index_data
)
select
json_build_object(
@@ -169,7 +171,6 @@ select
(select row_to_json(total_data) from total_data),
'overrided_settings_count',
(select count(1) from limited_data where overrided_settings = true)
-
)
SQL