Commit 1d6a733b authored by Anthony Regeda, committed by Will Rouesnel

multi-server-exporter: multi-server exporter is introduced

parent 72446a5b
@@ -49,16 +49,19 @@ Package vendoring is handled with [`govendor`](https://github.com/kardianos/gove
Path under which to expose metrics. Default is `/metrics`.
 
* `disable-default-metrics`
Use only metrics supplied from `queries.yaml` via `--extend.query-path`
Use only metrics supplied from `queries.yaml` via `--extend.query-path`.
* `disable-settings-metrics`
Use this flag if you don't want to scrape `pg_settings`.
 
* `extend.query-path`
Path to a YAML file containing custom queries to run. Check out [`queries.yaml`](queries.yaml)
for examples of the format.
* `dumpmaps`
Do not run - print the internal representation of the metric maps. Useful when debugging a custom
queries file.
* `log.level`
Set logging level: one of `debug`, `info`, `warn`, `error`, `fatal`
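
As a purely illustrative sketch (not the exporter's actual flag-handling code), the options above could be declared with Go's standard `flag` package as follows; the option names are taken from this README, while the types and defaults shown here are assumptions.

```go
// Illustrative only: the documented options declared with the standard
// library flag package. Defaults are assumptions, not the exporter's.
package main

import (
	"flag"
	"fmt"
)

func main() {
	var (
		disableDefaultMetrics  = flag.Bool("disable-default-metrics", false, "Use only metrics supplied via --extend.query-path.")
		disableSettingsMetrics = flag.Bool("disable-settings-metrics", false, "Do not scrape pg_settings.")
		queryPath              = flag.String("extend.query-path", "", "Path to a YAML file containing custom queries.")
		dumpMaps               = flag.Bool("dumpmaps", false, "Print the internal metric maps and exit.")
		logLevel               = flag.String("log.level", "info", "One of debug, info, warn, error, fatal.")
	)
	flag.Parse()

	fmt.Println(*disableDefaultMetrics, *disableSettingsMetrics, *queryPath, *dumpMaps, *logLevel)
}
```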
 
@@ -78,21 +81,23 @@ The following environment variables configure the exporter:
URI may contain the username and password to connect with.
 
* `DATA_SOURCE_URI`
an alternative to DATA_SOURCE_NAME which exclusively accepts the raw URI
an alternative to `DATA_SOURCE_NAME` which exclusively accepts the raw URI
without a username and password component.
 
* `DATA_SOURCE_USER`
When using `DATA_SOURCE_URI`, this environment variable is used to specify
the username.
* `DATA_SOURCE_USER_FILE`
The same, but reads the username from a file.
 
* `DATA_SOURCE_PASS`
When using `DATA_SOURCE_URI`, this environment variable is used to specify
the password to connect with.
* `DATA_SOURCE_PASS_FILE`
The same as above, but reads the password from a file (see the sketch below).
* `PG_EXPORTER_WEB_LISTEN_ADDRESS`
Address to listen on for web interface and telemetry. Default is `:9187`.
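
To make the interaction between these variables concrete, here is a minimal sketch, under assumptions, of how `DATA_SOURCE_URI` plus the user/password variables (or their `*_FILE` variants) could be combined into a single connection string. The helper names are hypothetical; the exporter's own handling (see the `getDataSources()` calls in the tests further down) may differ in detail.

```go
// Illustrative only: combining DATA_SOURCE_URI with DATA_SOURCE_USER/PASS
// (or their *_FILE variants) into one DSN. Helper names are hypothetical.
package main

import (
	"fmt"
	"io/ioutil"
	"net/url"
	"os"
	"strings"
)

// envOrFile returns the value of name, or the trimmed contents of the file
// pointed to by name+"_FILE" when that variable is set.
func envOrFile(name string) (string, error) {
	if path := os.Getenv(name + "_FILE"); path != "" {
		b, err := ioutil.ReadFile(path)
		if err != nil {
			return "", err
		}
		return strings.TrimSpace(string(b)), nil
	}
	return os.Getenv(name), nil
}

func dsnFromEnv() (string, error) {
	uri := os.Getenv("DATA_SOURCE_URI")
	if uri == "" {
		return "", fmt.Errorf("DATA_SOURCE_URI is not set")
	}
	user, err := envOrFile("DATA_SOURCE_USER")
	if err != nil {
		return "", err
	}
	pass, err := envOrFile("DATA_SOURCE_PASS")
	if err != nil {
		return "", err
	}
	// url.UserPassword escapes special characters in the credentials.
	return "postgresql://" + url.UserPassword(user, pass).String() + "@" + uri, nil
}

func main() {
	dsn, err := dsnFromEnv()
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(dsn)
}
```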
 
@@ -102,13 +107,16 @@ The following environment variables configure the exporter:
* `PG_EXPORTER_DISABLE_DEFAULT_METRICS`
Use only metrics supplied from `queries.yaml`. Value can be `true` or `false`. Default is `false`.
 
* `PG_EXPORTER_DISABLE_SETTINGS_METRICS`
Use this flag if you don't want to scrape `pg_settings`. Value can be `true` or `false`. Default is `false`.
* `PG_EXPORTER_EXTEND_QUERY_PATH`
Path to a YAML file containing custom queries to run. Check out [`queries.yaml`](queries.yaml)
for examples of the format.
 
* `PG_EXPORTER_CONSTANT_LABELS`
Labels to set in all metrics. A list of `label=value` pairs, separated by commas.
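
As an illustration of the `label=value` format just described (a sketch under assumptions, not the exporter's actual parser; `parseConstLabels` is a hypothetical name), such a string could be turned into `prometheus.Labels` like this:

```go
// Illustrative only: parsing PG_EXPORTER_CONSTANT_LABELS, e.g.
// "cluster=prod,region=eu", into prometheus.Labels (a map[string]string).
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/prometheus/client_golang/prometheus"
)

func parseConstLabels(s string) prometheus.Labels {
	labels := prometheus.Labels{}
	for _, pair := range strings.Split(s, ",") {
		kv := strings.SplitN(strings.TrimSpace(pair), "=", 2)
		if len(kv) != 2 || kv[0] == "" {
			continue // skip empty or malformed pairs
		}
		labels[strings.TrimSpace(kv[0])] = strings.TrimSpace(kv[1])
	}
	return labels
}

func main() {
	fmt.Println(parseConstLabels(os.Getenv("PG_EXPORTER_CONSTANT_LABELS")))
}
```

These labels end up on every metric the exporter emits, which is also why this commit threads a `prometheus.Labels` value through `makeDescMap` and `pgSetting.metric` in the diffs below.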

Settings set by environment variables starting with `PG_` will be overwritten by the corresponding CLI flag if given.
 
### Setting the Postgres server's data source name
@@ -120,6 +128,10 @@ For running it locally on a default Debian/Ubuntu install, this will work (trans
 
sudo -u postgres DATA_SOURCE_NAME="user=postgres host=/var/run/postgresql/ sslmode=disable" postgres_exporter
 
You can also set a list of sources to scrape different instances from a single exporter setup. Just define a comma-separated string:

sudo -u postgres DATA_SOURCE_NAME="port=5432,port=6432" postgres_exporter
See the [github.com/lib/pq](http://github.com/lib/pq) module for other ways to format the connection string.
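
To make the new behaviour concrete, here is a minimal, self-contained sketch of the pattern this commit introduces: the comma-separated `DATA_SOURCE_NAME` is split into individual DSNs and each one gets its own connection (the exporter's real `Server`/`Exporter` types are not reproduced here).

```go
// Illustrative only: one process scraping several PostgreSQL instances,
// each DSN taken from a comma-separated DATA_SOURCE_NAME.
package main

import (
	"database/sql"
	"log"
	"os"
	"strings"

	_ "github.com/lib/pq"
)

func main() {
	// e.g. DATA_SOURCE_NAME="port=5432,port=6432"
	dsns := strings.Split(os.Getenv("DATA_SOURCE_NAME"), ",")

	for _, dsn := range dsns {
		db, err := sql.Open("postgres", dsn)
		if err != nil {
			log.Printf("skipping one data source: %v", err) // avoid logging raw DSNs (credentials)
			continue
		}
		// Each connection would be scraped independently; in this commit the
		// per-server labels are threaded through via server.labels.
		if err := db.Ping(); err != nil {
			log.Printf("cannot reach one data source: %v", err)
		}
		db.Close()
	}
}
```

In the integration tests further down, the same idea shows up as `NewExporter(strings.Split(dsn, ","))` and one `NewServer(dsn)` per entry.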
 
### Adding new metrics
@@ -143,18 +155,18 @@ The -extend.query-path command-line argument specifies a YAML file containing ad
Some examples are provided in [queries.yaml](queries.yaml).
 
### Disabling default metrics
To work with non-officially-supported postgres versions (e.g. 8.2.15) or a variant of postgres (e.g. Greenplum),
you can disable the default metrics with the `--disable-default-metrics` flag. This removes all built-in metrics
and uses only the metrics defined by queries in the `queries.yaml` file you supply (so you must supply one,
otherwise the exporter will return nothing but internal statuses and nothing from your database).
 
### Running as non-superuser
 
To be able to collect metrics from `pg_stat_activity` and `pg_stat_replication`
as a non-superuser, you have to create views as a superuser and assign
permissions to them separately.

In PostgreSQL, views run with the permissions of the user that created them, so
they can act as security barriers.
 
package main
 
import (
"database/sql"
"errors"
"fmt"
"math"
"strconv"
@@ -13,8 +11,8 @@ import (
)
 
// Query the pg_settings view containing runtime variables
func querySettings(ch chan<- prometheus.Metric, db *sql.DB) error {
log.Debugln("Querying pg_setting view")
func querySettings(ch chan<- prometheus.Metric, server *Server) error {
log.Debugf("Querying pg_setting view on %q", server)
 
// pg_settings docs: https://www.postgresql.org/docs/current/static/view-pg-settings.html
//
@@ -22,9 +20,9 @@ func querySettings(ch chan<- prometheus.Metric, db *sql.DB) error {
// types in normaliseUnit() below
query := "SELECT name, setting, COALESCE(unit, ''), short_desc, vartype FROM pg_settings WHERE vartype IN ('bool', 'integer', 'real');"
 
rows, err := db.Query(query)
rows, err := server.db.Query(query)
if err != nil {
return errors.New(fmt.Sprintln("Error running query on database: ", namespace, err))
return fmt.Errorf("Error running query on database %q: %s %v", server, namespace, err)
}
defer rows.Close() // nolint: errcheck
 
@@ -32,10 +30,10 @@ func querySettings(ch chan<- prometheus.Metric, db *sql.DB) error {
s := &pgSetting{}
err = rows.Scan(&s.name, &s.setting, &s.unit, &s.shortDesc, &s.vartype)
if err != nil {
return errors.New(fmt.Sprintln("Error retrieving rows:", namespace, err))
return fmt.Errorf("Error retrieving rows on %q: %s %v", server, namespace, err)
}
 
ch <- s.metric()
ch <- s.metric(server.labels)
}
 
return nil
@@ -47,7 +45,7 @@ type pgSetting struct {
name, setting, unit, shortDesc, vartype string
}
 
func (s *pgSetting) metric() prometheus.Metric {
func (s *pgSetting) metric(labels prometheus.Labels) prometheus.Metric {
var (
err error
name = strings.Replace(s.name, ".", "_", -1)
@@ -78,7 +76,7 @@ func (s *pgSetting) metric() prometheus.Metric {
panic(fmt.Sprintf("Unsupported vartype %q", s.vartype))
}
 
desc := newDesc(subsystem, name, shortDesc)
desc := newDesc(subsystem, name, shortDesc, labels)
return prometheus.MustNewConstMetric(desc, prometheus.GaugeValue, val)
}
 
@@ -3,6 +3,7 @@
package main
 
import (
"github.com/prometheus/client_golang/prometheus"
dto "github.com/prometheus/client_model/go"
. "gopkg.in/check.v1"
)
@@ -25,7 +26,7 @@ var fixtures = []fixture{
unit: "seconds",
err: "",
},
d: "Desc{fqName: \"pg_settings_seconds_fixture_metric_seconds\", help: \"Foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_seconds_fixture_metric_seconds", help: "Foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: 5,
},
{
@@ -41,7 +42,7 @@ var fixtures = []fixture{
unit: "seconds",
err: "",
},
d: "Desc{fqName: \"pg_settings_milliseconds_fixture_metric_seconds\", help: \"Foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_milliseconds_fixture_metric_seconds", help: "Foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: 5,
},
{
@@ -57,7 +58,7 @@ var fixtures = []fixture{
unit: "bytes",
err: "",
},
d: "Desc{fqName: \"pg_settings_eight_kb_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_eight_kb_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 139264,
},
{
@@ -73,7 +74,7 @@ var fixtures = []fixture{
unit: "bytes",
err: "",
},
d: "Desc{fqName: \"pg_settings_16_kb_real_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_16_kb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 49152,
},
{
@@ -89,7 +90,7 @@ var fixtures = []fixture{
unit: "bytes",
err: "",
},
d: "Desc{fqName: \"pg_settings_16_mb_real_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_16_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 5.0331648e+07,
},
{
@@ -105,7 +106,7 @@ var fixtures = []fixture{
unit: "bytes",
err: "",
},
d: "Desc{fqName: \"pg_settings_32_mb_real_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_32_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 1.00663296e+08,
},
{
@@ -121,7 +122,7 @@ var fixtures = []fixture{
unit: "bytes",
err: "",
},
d: "Desc{fqName: \"pg_settings_64_mb_real_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_64_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 2.01326592e+08,
},
{
@@ -137,7 +138,7 @@ var fixtures = []fixture{
unit: "",
err: "",
},
d: "Desc{fqName: \"pg_settings_bool_on_fixture_metric\", help: \"Foo foo foo\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_bool_on_fixture_metric", help: "Foo foo foo", constLabels: {}, variableLabels: []}`,
v: 1,
},
{
@@ -153,7 +154,7 @@ var fixtures = []fixture{
unit: "",
err: "",
},
d: "Desc{fqName: \"pg_settings_bool_off_fixture_metric\", help: \"Foo foo foo\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_bool_off_fixture_metric", help: "Foo foo foo", constLabels: {}, variableLabels: []}`,
v: 0,
},
{
@@ -169,7 +170,7 @@ var fixtures = []fixture{
unit: "seconds",
err: "",
},
d: "Desc{fqName: \"pg_settings_special_minus_one_value_seconds\", help: \"foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_special_minus_one_value_seconds", help: "foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: -1,
},
{
@@ -185,7 +186,7 @@ var fixtures = []fixture{
unit: "",
err: "",
},
d: "Desc{fqName: \"pg_settings_rds_rds_superuser_reserved_connections\", help: \"Sets the number of connection slots reserved for rds_superusers.\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_rds_rds_superuser_reserved_connections", help: "Sets the number of connection slots reserved for rds_superusers.", constLabels: {}, variableLabels: []}`,
v: 2,
},
{
@@ -233,7 +234,7 @@ func (s *PgSettingSuite) TestMetric(c *C) {
 
for _, f := range fixtures {
d := &dto.Metric{}
m := f.p.metric()
m := f.p.metric(prometheus.Labels{})
m.Write(d) // nolint: errcheck
 
c.Check(m.Desc().String(), Equals, f.d)
This diff is collapsed.
@@ -7,11 +7,11 @@ package main
 
import (
"os"
"strings"
"testing"
 
. "gopkg.in/check.v1"
 
"database/sql"
"fmt"
 
_ "github.com/lib/pq"
@@ -31,7 +31,7 @@ func (s *IntegrationSuite) SetUpSuite(c *C) {
dsn := os.Getenv("DATA_SOURCE_NAME")
c.Assert(dsn, Not(Equals), "")
 
exporter := NewExporter(dsn, false, "")
exporter := NewExporter(strings.Split(dsn, ","))
c.Assert(exporter, NotNil)
// Assign the exporter to the suite
s.e = exporter
@@ -48,29 +48,31 @@ func (s *IntegrationSuite) TestAllNamespacesReturnResults(c *C) {
}
}()
 
// Open a database connection
db, err := sql.Open("postgres", s.e.dsn)
c.Assert(db, NotNil)
c.Assert(err, IsNil)
defer db.Close()
for _, dsn := range s.e.dsn {
// Open a database connection
server, err := NewServer(dsn)
c.Assert(server, NotNil)
c.Assert(err, IsNil)
 
// Do a version update
err = s.e.checkMapVersions(ch, db)
c.Assert(err, IsNil)
// Do a version update
err = s.e.checkMapVersions(ch, server)
c.Assert(err, IsNil)
 
err = querySettings(ch, db)
if !c.Check(err, Equals, nil) {
fmt.Println("## ERRORS FOUND")
fmt.Println(err)
}
err = querySettings(ch, server)
if !c.Check(err, Equals, nil) {
fmt.Println("## ERRORS FOUND")
fmt.Println(err)
}
 
// This should never happen in our test cases.
errMap := queryNamespaceMappings(ch, db, s.e.metricMap, s.e.queryOverrides)
if !c.Check(len(errMap), Equals, 0) {
fmt.Println("## NAMESPACE ERRORS FOUND")
for namespace, err := range errMap {
fmt.Println(namespace, ":", err)
// This should never happen in our test cases.
errMap := queryNamespaceMappings(ch, server)
if !c.Check(len(errMap), Equals, 0) {
fmt.Println("## NAMESPACE ERRORS FOUND")
for namespace, err := range errMap {
fmt.Println(namespace, ":", err)
}
}
server.Close()
}
}
 
@@ -86,12 +88,12 @@ func (s *IntegrationSuite) TestInvalidDsnDoesntCrash(c *C) {
}()
 
// Send a bad DSN
exporter := NewExporter("invalid dsn", false, *queriesPath)
exporter := NewExporter([]string{"invalid dsn"})
c.Assert(exporter, NotNil)
exporter.scrape(ch)
 
// Send a DSN to a non-listening port.
exporter = NewExporter("postgresql://nothing:nothing@127.0.0.1:1/nothing", false, *queriesPath)
exporter = NewExporter([]string{"postgresql://nothing:nothing@127.0.0.1:1/nothing"})
c.Assert(exporter, NotNil)
exporter.scrape(ch)
}
@@ -109,7 +111,7 @@ func (s *IntegrationSuite) TestUnknownMetricParsingDoesntCrash(c *C) {
dsn := os.Getenv("DATA_SOURCE_NAME")
c.Assert(dsn, Not(Equals), "")
 
exporter := NewExporter(dsn, false, "")
exporter := NewExporter(strings.Split(dsn, ","))
c.Assert(exporter, NotNil)
 
// Convert the default maps into a list of empty maps.
@@ -10,6 +10,7 @@ import (
"os"
 
"github.com/blang/semver"
"github.com/prometheus/client_golang/prometheus"
)
 
// Hook up gocheck into the "go test" runner.
@@ -34,7 +35,7 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
 
{
// No metrics should be eliminated
resultMap := makeDescMap(semver.MustParse("0.0.1"), testMetricMap)
resultMap := makeDescMap(semver.MustParse("0.0.1"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
@@ -55,7 +56,7 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
testMetricMap["test_namespace"]["metric_which_discards"] = discardableMetric
 
// Discard metric should be discarded
resultMap := makeDescMap(semver.MustParse("0.0.1"), testMetricMap)
resultMap := makeDescMap(semver.MustParse("0.0.1"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
@@ -76,7 +77,7 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
testMetricMap["test_namespace"]["metric_which_discards"] = discardableMetric
 
// Discard metric should be discarded
resultMap := makeDescMap(semver.MustParse("0.0.2"), testMetricMap)
resultMap := makeDescMap(semver.MustParse("0.0.2"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
@@ -92,7 +93,6 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
 
// test read username and password from file
func (s *FunctionalSuite) TestEnvironmentSettingWithSecretsFiles(c *C) {
err := os.Setenv("DATA_SOURCE_USER_FILE", "./tests/username_file")
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_USER_FILE")
@@ -107,29 +107,33 @@ func (s *FunctionalSuite) TestEnvironmentSettingWithSecretsFiles(c *C) {
 
var expected = "postgresql://custom_username$&+,%2F%3A;=%3F%40:custom_password$&+,%2F%3A;=%3F%40@localhost:5432/?sslmode=disable"
 
dsn := getDataSource()
if dsn != expected {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn, expected)
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != expected {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], expected)
}
}
 
// test read DATA_SOURCE_NAME from environment
func (s *FunctionalSuite) TestEnvironmentSettingWithDns(c *C) {
envDsn := "postgresql://user:password@localhost:5432/?sslmode=enabled"
err := os.Setenv("DATA_SOURCE_NAME", envDsn)
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_NAME")
 
dsn := getDataSource()
if dsn != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn, envDsn)
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], envDsn)
}
}
 
// test DATA_SOURCE_NAME is used even if username and password environment variables are set
func (s *FunctionalSuite) TestEnvironmentSettingWithDnsAndSecrets(c *C) {
envDsn := "postgresql://userDsn:passwordDsn@localhost:55432/?sslmode=disabled"
err := os.Setenv("DATA_SOURCE_NAME", envDsn)
c.Assert(err, IsNil)
@@ -143,9 +147,12 @@ func (s *FunctionalSuite) TestEnvironmentSettingWithDnsAndSecrets(c *C) {
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_PASS")
 
dsn := getDataSource()
if dsn != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn, envDsn)
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], envDsn)
}
}
 