postgresql.go
package postgresql

import (
	"bytes"
	"fmt"
	"strings"

	// blank import to register the pgx database/sql driver.
	_ "github.com/jackc/pgx/stdlib"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/internal"
	"github.com/influxdata/telegraf/plugins/inputs"
)
// Postgresql is a telegraf input plugin that gathers metrics from one or
// more PostgreSQL servers.
type Postgresql struct {
	Service
	Databases        []string
	IgnoredDatabases []string
}

var ignoredColumns = map[string]bool{"stats_reset": true}
var sampleConfig = `
  ## specify address via a url matching:
  ##   postgres://[pqgotest[:password]]@localhost[/dbname]\
  ##       ?sslmode=[disable|verify-ca|verify-full]
  ## or a simple string:
  ##   host=localhost user=pqgotest password=... sslmode=... dbname=app_production
  ##
  ## All connection parameters are optional.
  ##
  ## Without the dbname parameter, the driver will default to a database
  ## with the same name as the user. This dbname is just for instantiating a
  ## connection with the server and doesn't restrict the databases we are trying
  ## to grab metrics for.
  ##
  address = "host=localhost user=postgres sslmode=disable"

  ## A custom name for the database that will be used as the "server" tag in the
  ## measurement output. If not specified, a default one generated from
  ## the connection address is used.
  # outputaddress = "db01"

  ## connection configuration.
  ## maxlifetime - specify the maximum lifetime of a connection.
  ## default is forever (0s)
  max_lifetime = "0s"

  ## A list of databases to explicitly ignore. If not specified, metrics for all
  ## databases are gathered. Do NOT use with the 'databases' option.
  # ignored_databases = ["postgres", "template0", "template1"]

  ## A list of databases to pull metrics about. If not specified, metrics for all
  ## databases are gathered. Do NOT use with the 'ignored_databases' option.
  # databases = ["app_production", "testing"]
`
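// As a rough sketch of how the sample config above is consumed, it ends up
// inside an [[inputs.postgresql]] section of telegraf.conf; the values shown
// are illustrative, taken from the sample config itself:
//
//   [[inputs.postgresql]]
//     address = "host=localhost user=postgres sslmode=disable"
//     # ignored_databases = ["postgres", "template0", "template1"]
//     # databases = ["app_production", "testing"]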
// SampleConfig returns the sample configuration for the plugin.
func (p *Postgresql) SampleConfig() string {
	return sampleConfig
}

// Description returns a one-line description of the plugin.
func (p *Postgresql) Description() string {
	return "Read metrics from one or many postgresql servers"
}

// IgnoredColumns returns the set of result columns that are never emitted as fields.
func (p *Postgresql) IgnoredColumns() map[string]bool {
	return ignoredColumns
}
// Gather queries pg_stat_database (optionally filtered by the databases or
// ignored_databases options) and pg_stat_bgwriter, and adds one "postgresql"
// measurement per returned row to the accumulator.
func (p *Postgresql) Gather(acc telegraf.Accumulator) error {
	var (
		err     error
		query   string
		columns []string
	)

	if len(p.Databases) == 0 && len(p.IgnoredDatabases) == 0 {
		query = `SELECT * FROM pg_stat_database`
	} else if len(p.IgnoredDatabases) != 0 {
		query = fmt.Sprintf(`SELECT * FROM pg_stat_database WHERE datname NOT IN ('%s')`,
			strings.Join(p.IgnoredDatabases, "','"))
	} else {
		query = fmt.Sprintf(`SELECT * FROM pg_stat_database WHERE datname IN ('%s')`,
			strings.Join(p.Databases, "','"))
	}

	rows, err := p.DB.Query(query)
	if err != nil {
		return err
	}
	defer rows.Close()

	// grab the column information from the result
	if columns, err = rows.Columns(); err != nil {
		return err
	}

	for rows.Next() {
		err = p.accRow(rows, acc, columns)
		if err != nil {
			return err
		}
	}

	query = `SELECT * FROM pg_stat_bgwriter`

	bg_writer_row, err := p.DB.Query(query)
	if err != nil {
		return err
	}
	defer bg_writer_row.Close()

	// grab the column information from the result
	if columns, err = bg_writer_row.Columns(); err != nil {
		return err
	}

	for bg_writer_row.Next() {
		err = p.accRow(bg_writer_row, acc, columns)
		if err != nil {
			return err
		}
	}

	return bg_writer_row.Err()
}
type scanner interface {
	Scan(dest ...interface{}) error
}
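// Note: both *sql.Rows and *sql.Row satisfy scanner, so accRow below can be
// fed from the row loops in Gather as well as from a single-row QueryRow call.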
// accRow scans a single result row into a map keyed by column name, derives
// the "db" tag from the datname column (when present), and adds the remaining
// non-ignored columns as fields of a "postgresql" measurement.
func (p *Postgresql) accRow(row scanner, acc telegraf.Accumulator, columns []string) error {
	var columnVars []interface{}
	var dbname bytes.Buffer

	// this is where we'll store the column name with its *interface{}
	columnMap := make(map[string]*interface{})

	for _, column := range columns {
		columnMap[column] = new(interface{})
	}

	// populate the array of interface{} with the pointers in the right order
	for i := 0; i < len(columnMap); i++ {
		columnVars = append(columnVars, columnMap[columns[i]])
	}

	// deconstruct array of variables and send to Scan
	err := row.Scan(columnVars...)
	if err != nil {
		return err
	}

	if columnMap["datname"] != nil {
		// extract the database name from the column map
		if dbNameStr, ok := (*columnMap["datname"]).(string); ok {
			dbname.WriteString(dbNameStr)
		} else {
			// PG 12 adds tracking of global objects to pg_stat_database
			dbname.WriteString("postgres_global")
		}
	} else {
		dbname.WriteString("postgres")
	}

	var tagAddress string
	tagAddress, err = p.SanitizedAddress()
	if err != nil {
		return err
	}

	tags := map[string]string{"server": tagAddress, "db": dbname.String()}

	fields := make(map[string]interface{})
	for col, val := range columnMap {
		_, ignore := ignoredColumns[col]
		if !ignore {
			fields[col] = *val
		}
	}
	acc.AddFields("postgresql", fields, tags)
	return nil
}
func init() {
	inputs.Add("postgresql", func() telegraf.Input {
		return &Postgresql{
			Service: Service{
				MaxIdle: 1,
				MaxOpen: 1,
				MaxLifetime: internal.Duration{
					Duration: 0,
				},
				IsPgBouncer: false,
			},
		}
	})
}
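// Illustrative sketch of the emitted data: every row becomes one "postgresql"
// point tagged with "server" and "db", with each non-ignored column as a field.
// The exact field names depend on the server's pg_stat_database and
// pg_stat_bgwriter columns, so the ones below are examples only; rows from
// pg_stat_bgwriter have no datname column and therefore land under db=postgres:
//
//   postgresql,db=app_production,server=<sanitized address> numbackends=1i,xact_commit=4182i,blks_hit=98765i,...
//   postgresql,db=postgres,server=<sanitized address> checkpoints_timed=72i,buffers_checkpoint=893i,...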