telegraf/plugins/inputs/postgresql/postgresql.go

package postgresql

import (
	"bytes"
	"fmt"
	"strings"

	// register the pgx driver with database/sql
	_ "github.com/jackc/pgx/stdlib"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/internal"
	"github.com/influxdata/telegraf/plugins/inputs"
)

// Postgresql is an input plugin that gathers metrics from one or more
// PostgreSQL servers.
type Postgresql struct {
	Service
	Databases        []string
	IgnoredDatabases []string
}

var ignoredColumns = map[string]bool{"stats_reset": true}

var sampleConfig = `
  ## specify address via a url matching:
  ##   postgres://[pqgotest[:password]]@localhost[/dbname]\
  ##       ?sslmode=[disable|verify-ca|verify-full]
  ## or a simple string:
  ##   host=localhost user=pqgotest password=... sslmode=... dbname=app_production
  ##
  ## All connection parameters are optional.
  ##
  ## Without the dbname parameter, the driver will default to a database
  ## with the same name as the user. This dbname is just for instantiating a
  ## connection with the server and doesn't restrict the databases we are trying
  ## to grab metrics for.
  ##
  address = "host=localhost user=postgres sslmode=disable"

  ## A custom name for the database that will be used as the "server" tag in the
  ## measurement output. If not specified, a default one generated from
  ## the connection address is used.
  # outputaddress = "db01"

  ## Connection configuration.
  ## max_lifetime - specify the maximum lifetime of a connection.
  ## default is forever (0s)
  max_lifetime = "0s"

  ## A list of databases to explicitly ignore. If not specified, metrics for all
  ## databases are gathered. Do NOT use with the 'databases' option.
  # ignored_databases = ["postgres", "template0", "template1"]

  ## A list of databases to pull metrics about. If not specified, metrics for all
  ## databases are gathered. Do NOT use with the 'ignored_databases' option.
  # databases = ["app_production", "testing"]
`
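
// For reference, a telegraf.conf entry exercising the options above might look
// like the following (values are illustrative, not defaults):
//
//	[[inputs.postgresql]]
//	  address = "host=localhost user=postgres sslmode=disable"
//	  ignored_databases = ["template0", "template1"]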

func (p *Postgresql) SampleConfig() string {
	return sampleConfig
}

func (p *Postgresql) Description() string {
	return "Read metrics from one or many postgresql servers"
}

func (p *Postgresql) IgnoredColumns() map[string]bool {
	return ignoredColumns
}

// Gather queries pg_stat_database (optionally filtered by the databases /
// ignored_databases settings) and pg_stat_bgwriter, and adds one measurement
// per returned row to the accumulator.
func (p *Postgresql) Gather(acc telegraf.Accumulator) error {
	var (
		err     error
		query   string
		columns []string
	)

	if len(p.Databases) == 0 && len(p.IgnoredDatabases) == 0 {
		query = `SELECT * FROM pg_stat_database`
	} else if len(p.IgnoredDatabases) != 0 {
		query = fmt.Sprintf(`SELECT * FROM pg_stat_database WHERE datname NOT IN ('%s')`,
			strings.Join(p.IgnoredDatabases, "','"))
	} else {
		query = fmt.Sprintf(`SELECT * FROM pg_stat_database WHERE datname IN ('%s')`,
			strings.Join(p.Databases, "','"))
	}
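
	// For example, with ignored_databases = ["template0", "template1"] the
	// statement built above is:
	//   SELECT * FROM pg_stat_database WHERE datname NOT IN ('template0','template1')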
	rows, err := p.DB.Query(query)
	if err != nil {
		return err
	}
	defer rows.Close()

	// grab the column names from the result set
	if columns, err = rows.Columns(); err != nil {
		return err
	}

	for rows.Next() {
		err = p.accRow(rows, acc, columns)
		if err != nil {
			return err
		}
	}
	// surface any error hit while iterating the pg_stat_database rows
	if err = rows.Err(); err != nil {
		return err
	}

	query = `SELECT * FROM pg_stat_bgwriter`

	bgWriterRow, err := p.DB.Query(query)
	if err != nil {
		return err
	}
	defer bgWriterRow.Close()

	// grab the column names from the result set
	if columns, err = bgWriterRow.Columns(); err != nil {
		return err
	}

	for bgWriterRow.Next() {
		err = p.accRow(bgWriterRow, acc, columns)
		if err != nil {
			return err
		}
	}

	return bgWriterRow.Err()
}

// scanner abstracts the Scan method of sql.Rows so accRow can accept any row
// source.
type scanner interface {
	Scan(dest ...interface{}) error
}

// accRow scans a single result row into a map keyed by column name, derives
// the "db" tag from the datname column (when present), and emits the row as a
// "postgresql" measurement.
func (p *Postgresql) accRow(row scanner, acc telegraf.Accumulator, columns []string) error {
	var columnVars []interface{}
	var dbname bytes.Buffer

	// this is where we'll store the column name with its *interface{}
	columnMap := make(map[string]*interface{})

	for _, column := range columns {
		columnMap[column] = new(interface{})
	}

	// populate the array of interface{} with the pointers in the right order
	for i := 0; i < len(columnMap); i++ {
		columnVars = append(columnVars, columnMap[columns[i]])
	}

	// deconstruct array of variables and send to Scan
	err := row.Scan(columnVars...)
	if err != nil {
		return err
	}

	if columnMap["datname"] != nil {
		// extract the database name from the column map
		if dbNameStr, ok := (*columnMap["datname"]).(string); ok {
			dbname.WriteString(dbNameStr)
		} else {
			// PG 12 adds tracking of global objects to pg_stat_database;
			// those rows have a NULL datname
			dbname.WriteString("postgres_global")
		}
	} else {
		dbname.WriteString("postgres")
	}

	var tagAddress string
	tagAddress, err = p.SanitizedAddress()
	if err != nil {
		return err
	}

	tags := map[string]string{"server": tagAddress, "db": dbname.String()}

	fields := make(map[string]interface{})
	for col, val := range columnMap {
		_, ignore := ignoredColumns[col]
		if !ignore {
			fields[col] = *val
		}
	}

	acc.AddFields("postgresql", fields, tags)

	return nil
}

func init() {
	inputs.Add("postgresql", func() telegraf.Input {
		return &Postgresql{
			Service: Service{
				MaxIdle: 1,
				MaxOpen: 1,
				MaxLifetime: internal.Duration{
					Duration: 0,
				},
				IsPgBouncer: false,
			},
		}
	})
}
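
// A rough sketch of how the plugin can be exercised end to end, e.g. from a
// test in postgresql_test.go. It assumes a local PostgreSQL instance, the
// github.com/influxdata/telegraf/testutil helpers, and the Start/Stop methods
// provided by this package's Service type (not shown in this file); adjust the
// address and assertions to your environment.
//
//	p := &Postgresql{
//		Service: Service{
//			Address: "host=localhost user=postgres sslmode=disable",
//		},
//		Databases: []string{"postgres"},
//	}
//
//	var acc testutil.Accumulator
//	if err := p.Start(&acc); err != nil { // opens the database/sql connection
//		t.Fatal(err)
//	}
//	defer p.Stop()
//
//	if err := acc.GatherError(p.Gather); err != nil {
//		t.Fatal(err)
//	}
//	// every gathered row shows up as a "postgresql" measurement tagged with
//	// "server" and "db", e.g. acc.HasMeasurement("postgresql") == true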