Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • gitlab-org/build/omnibus-mirror/postgres_exporter
1 result
Show changes
Commits on Source (36)
Showing
with 794 additions and 428 deletions
*
!bin/
Loading
Loading
@@ -14,3 +14,7 @@
/release
/*.prom
/.metrics.*.*.prom
/.metrics.*.*.prom.unique
/.assets-branch
/.metrics.*.added
/.metrics.*.removed
Loading
Loading
@@ -3,7 +3,7 @@ services:
- docker
language: go
go:
- '1.10'
- '1.11'
before_install:
- go get -v github.com/mattn/goveralls
- sudo wget -O /usr/local/bin/p2 https://github.com/wrouesnel/p2cli/releases/download/r4/p2
Loading
Loading
@@ -12,6 +12,7 @@ before_install:
&& sudo chmod +x /usr/local/bin/docker-compose
- sudo apt-get update && sudo apt-get install postgresql-client-common
script:
- ./gh-assets-clone.sh
- go run mage.go -v all
- "$HOME/gopath/bin/goveralls -coverprofile=cover.out -service=travis-ci"
- go run mage.go docker
Loading
Loading
@@ -21,10 +22,17 @@ after_success:
; docker push wrouesnel/postgres_exporter:$TRAVIS_TAG ; fi
- if [ "$TRAVIS_BRANCH" == "master" ]; then docker push wrouesnel/postgres_exporter
; fi
- ./postgres-metrics-get-changes.sh .assets-branch/metriclists
- if [ "$TRAVIS_BRANCH" == "master" ]; then ./gh-metrics-push.sh ; fi
env:
global:
- DOCKER_USER=wrouesnel
- secure: f0H5HKL/5f/ZZVGZ7puegWZ6eig0TmruihuSEJCx1+Y6yDZn7l8lH+eETP9KAzH27c3CG1F9ytu/3gnnTOafXnDLlCve3fL5sKF3+pNQRwi3IojsODjdfPW+KEbG+1RD7IgkCn+DSRmvvpLr4zGOmZFEM1ZtLL878u4Hsrv/X5pDbKJgG/cXDRJfsu/EcpviO4WM8zOakBY8QihXhGpZiRtpRDCXWjW49PdCkW9hsfzFaU1yjvih9EJ0cfcH+9CFCRkezwAPlCETbOv288uHXc6bCuEEX1bgJ0ZzEXYAyoO00+12ePbQZEGNikSVT55nfC+jZLLTavQkFi862Hcx/lmJpA/7aeNYOrDcomwWMRRc4Ava2+cod7acVvo45SHRq+Jj9ofDhj9s0T/aZwV+2doc9GwDN9J6aEs9Nham2G955K1H0fmMW9lv0ThSVEZ3XbzCHyR4nPAwJQXrzauqbbihCim/g/YC5gbVs7O/4GkN2Z9LK30IJr1/NtJdIa6fMk3Zdhp6LGbXCvVFRbE0rMiTLbB8O3ll2smCu3aFYv7J9IfvI0ol0ww7kULpyf/vqxkK0NJXsKgoK/Uo1lM9gNpJBHsMt9nWnDvLj2DKZNTqkxzJeG8O98ADrQWEGFhpcsSsbW9pAMsrp6D4LQikN8KoFvh9F8h9lBsYpafzlOA=
- GIT_ASSETS_BRANCH=assets
- secure: 04rZWNM20asC0CHqgSa9OwU5d0Zl8sTPXdaHwm3mR75xWcoQwLiNpIzQpdrtH21VPqj78CdwBXvt1ZAcl4SZaNxEFbvblSz2CZ/3dAeM3uj+LShn/wMgQGdCALzF1hlpnDOQsFpdY5GqKS28rYF14EZDT4ZGf+uBamwCq9QNPdApypAqQCfJml8TYeUbJfgvTWoVNMfy5eIiWjYAUriXE258UY4z6ErWqs00A3cmLadkjQaDaNowcjTiFA2qRUcJckLZmbo+3mqa0N2DST0uXflvwOMmPJzjVNXkzs6hX7GVrIjfK0ttud0SV6iatuB7LdAYBlNvMrPJHAEchdR/xhfXv0obx/CdWBSo55sWy/DPNGh0BNgdohX/IcGaVX2318ca81+jD4IEJ//0DCVpmfhi+xM7N2Tmpstus6ToBJy2ZWwP9xOuchrSqU5+NPb+xJarRnDclBydQNJpJm+WIZo9nPawggLaDkpoHxppgg3e+f4Mc/bVM5H/v/Qi36Gk2gH4m2ZHDjEJdyHf066L2avKvsIPiW9xobNmUPkCGzDVmCB9vGCn8BNnm3mdHMBcYxaIUOFcSyLi8c0xk9gLvVcDMKn0jDNuegQ5HAfw4zTER4VdRVx4e4iF4NUgj+LmvLXNdJlEAXUu5m4cJDlWfgftMPLTtn3rncq5IgNrVO0=
branches:
except:
- assets
deploy:
skip_cleanup: true
provider: releases
Loading
Loading
# --- Directory layout ------------------------------------------------------
COVERDIR = .coverage
TOOLDIR = tools
BINDIR = bin
RELEASEDIR = release

DIRS = $(BINDIR) $(RELEASEDIR)

# Go sources/packages, excluding vendored code and build output.
GO_SRC := $(shell find . -name '*.go' ! -path '*/vendor/*' ! -path 'tools/*' ! -path 'bin/*' ! -path 'release/*' )
GO_DIRS := $(shell find . -type d -name '*.go' ! -path '*/vendor/*' ! -path 'tools/*' ! -path 'bin/*' ! -path 'release/*' )
GO_PKGS := $(shell go list ./... | grep -v '/vendor/')

CONTAINER_NAME ?= wrouesnel/postgres_exporter:latest
BINARY := $(shell basename $(shell pwd))
VERSION ?= $(shell git describe --dirty 2>/dev/null)
VERSION_SHORT ?= $(shell git describe --abbrev=0 2>/dev/null)

# Fall back to a sentinel version when git metadata is unavailable
# (e.g. building from a tarball instead of a clone).
ifeq ($(VERSION),)
VERSION := v0.0.0
endif
ifeq ($(VERSION_SHORT),)
VERSION_SHORT := v0.0.0
endif

# By default this list is filtered down to some common platforms.
platforms := $(subst /,-,$(shell go tool dist list | grep -e linux -e windows -e darwin | grep -e 386 -e amd64))
PLATFORM_BINS_TMP := $(patsubst %,$(BINDIR)/$(BINARY)_$(VERSION_SHORT)_%/$(BINARY),$(platforms))
# Windows binaries get a .exe suffix.
PLATFORM_BINS := $(patsubst $(BINDIR)/$(BINARY)_$(VERSION_SHORT)_windows-%/$(BINARY),$(BINDIR)/$(BINARY)_$(VERSION_SHORT)_windows-%/$(BINARY).exe,$(PLATFORM_BINS_TMP))
PLATFORM_DIRS := $(patsubst %,$(BINDIR)/$(BINARY)_$(VERSION_SHORT)_%,$(platforms))
PLATFORM_TARS := $(patsubst %,$(RELEASEDIR)/$(BINARY)_$(VERSION_SHORT)_%.tar.gz,$(platforms))

# These are evaluated on use (recursive `=` assignment), and so will have the
# correct per-target values in the build rule
# (https://vic.demuzere.be/articles/golang-makefile-crosscompile/)
PLATFORMS_TEMP = $(subst /, ,$(subst -, ,$(patsubst $(BINDIR)/$(BINARY)_$(VERSION_SHORT)_%,%,$@)))
GOOS = $(word 1, $(PLATFORMS_TEMP))
GOARCH = $(word 2, $(PLATFORMS_TEMP))

CURRENT_PLATFORM_TMP := $(BINDIR)/$(BINARY)_$(VERSION_SHORT)_$(shell go env GOOS)-$(shell go env GOARCH)/$(BINARY)
CURRENT_PLATFORM := $(patsubst $(BINDIR)/$(BINARY)_$(VERSION_SHORT)_windows-%/$(BINARY),$(BINDIR)/$(BINARY)_$(VERSION_SHORT)_windows-%/$(BINARY).exe,$(CURRENT_PLATFORM_TMP))

# Auto-detect linter concurrency from gometalinter's own help output
# unless the user overrides it.
CONCURRENT_LINTERS ?=
ifeq ($(CONCURRENT_LINTERS),)
CONCURRENT_LINTERS = $(shell gometalinter --help | grep -o 'concurrency=\w*' | cut -d= -f2 | cut -d' ' -f1)
endif

LINTER_DEADLINE ?= 30s

# Ensure output directories exist at parse time.
$(shell mkdir -p $(DIRS))

# Put locally-built tools first on PATH for all recipes.
export PATH := $(TOOLDIR)/bin:$(PATH)
SHELL := env PATH=$(PATH) /bin/bash

all: style lint test binary

binary: $(BINARY)

# Symlink the host-platform build to the bare binary name.
$(BINARY): $(CURRENT_PLATFORM)
	ln -sf $< $@

# Static cross-compile for each platform; GOOS/GOARCH are derived from $@.
$(PLATFORM_BINS): $(GO_SRC)
	CGO_ENABLED=0 GOOS=$(GOOS) GOARCH=$(GOARCH) go build -a \
	    -ldflags "-extldflags '-static' -X main.Version=$(VERSION)" \
	    -o $@ .

$(PLATFORM_DIRS): $(PLATFORM_BINS)

$(PLATFORM_TARS): $(RELEASEDIR)/%.tar.gz : $(BINDIR)/%
	tar -czf $@ -C $(BINDIR) $$(basename $<)

release-bin: $(PLATFORM_BINS)

release: $(PLATFORM_TARS)

# Take a go build and turn it into a minimal container
docker: $(CURRENT_PLATFORM)
	docker build --build-arg=binary=$(CURRENT_PLATFORM) -t $(CONTAINER_NAME) .

style: tools
	gometalinter --disable-all --enable=gofmt --vendor

lint: tools
	@echo Using $(CONCURRENT_LINTERS) processes
	gometalinter -j $(CONCURRENT_LINTERS) --deadline=$(LINTER_DEADLINE) --disable=gotype --disable=gocyclo $(GO_DIRS)

fmt: tools
	gofmt -s -w $(GO_SRC)

# Compile (but do not run) the integration test binary with coverage support.
postgres_exporter_integration_test: $(GO_SRC)
	CGO_ENABLED=0 go test -c -tags integration \
	    -a -ldflags "-extldflags '-static' -X main.Version=$(VERSION)" \
	    -o postgres_exporter_integration_test -cover -covermode count .

test: tools
	@mkdir -p $(COVERDIR)
	@rm -f $(COVERDIR)/*
	for pkg in $(GO_PKGS) ; do \
		go test -v -covermode count -coverprofile=$(COVERDIR)/$$(echo $$pkg | tr '/' '-').out $$pkg || exit 1 ; \
	done
	gocovmerge $(shell find $(COVERDIR) -name '*.out') > cover.test.out

test-integration: postgres_exporter postgres_exporter_integration_test
	tests/test-smoke "$(shell pwd)/postgres_exporter" "$(shell pwd)/postgres_exporter_integration_test_script $(shell pwd)/postgres_exporter_integration_test $(shell pwd)/cover.integration.out"

cover.out: tools
	gocovmerge cover.*.out > cover.out

# Quote the directory variables so an empty value cannot expand into a
# bare `find -delete` on the working directory.
clean:
	[ -n "$(BINDIR)" ] && [ -e "$(BINDIR)" ] && find "$(BINDIR)" -print -delete || /bin/true
	[ -n "$(COVERDIR)" ] && [ -e "$(COVERDIR)" ] && find "$(COVERDIR)" -print -delete || /bin/true
	[ -n "$(RELEASEDIR)" ] && [ -e "$(RELEASEDIR)" ] && find "$(RELEASEDIR)" -print -delete || /bin/true
	rm -f postgres_exporter postgres_exporter_integration_test

tools:
	$(MAKE) -C $(TOOLDIR)

# Declare every command-style target phony so same-named files cannot
# shadow them (lint/docker/release-bin/test-integration were missing).
.PHONY: tools style fmt lint test test-integration all release release-bin binary docker clean
Loading
Loading
@@ -5,7 +5,8 @@
# PostgreSQL Server Exporter
 
Prometheus exporter for PostgreSQL server metrics.
Supported Postgres versions: 9.1 and up.
CI Tested PostgreSQL versions: `9.1`, `9.2`, `9.3`, `9.4`, `9.5`, `9.6`, `10`, `11`
 
## Quick Start
This package is available for Docker:
Loading
Loading
@@ -13,7 +14,7 @@ This package is available for Docker:
# Start an example database
docker run --net=host -it --rm -e POSTGRES_PASSWORD=password postgres
# Connect to it
docker run --net=host -e DATA_SOURCE_NAME="postgresql://postgres:password@localhost:5432/?sslmode=disable" wrouesnel/postgres_exporter
docker run --net=host -e DATA_SOURCE_NAME="postgresql://postgres:password@localhost:5432/postgres?sslmode=disable" wrouesnel/postgres_exporter
```
 
## Building and running
Loading
Loading
@@ -42,22 +43,25 @@ Package vendoring is handled with [`govendor`](https://github.com/kardianos/gove
### Flags
 
* `web.listen-address`
Address to listen on for web interface and telemetry.
Address to listen on for web interface and telemetry. Default is `:9187`.
 
* `web.telemetry-path`
Path under which to expose metrics.
Path under which to expose metrics. Default is `/metrics`.
 
* `disable-default-metrics`
Use only metrics supplied from `queries.yaml` via `--extend.query-path`
Use only metrics supplied from `queries.yaml` via `--extend.query-path`.
* `disable-settings-metrics`
Use the flag if you don't want to scrape `pg_settings`.
 
* `extend.query-path`
Path to a YAML file containing custom queries to run. Check out [`queries.yaml`](queries.yaml)
for examples of the format.
* `dumpmaps`
Do not run - print the internal representation of the metric maps. Useful when debugging a custom
queries file.
* `log.level`
Set logging level: one of `debug`, `info`, `warn`, `error`, `fatal`
 
Loading
Loading
@@ -65,6 +69,9 @@ Package vendoring is handled with [`govendor`](https://github.com/kardianos/gove
Set the log output target and format. e.g. `logger:syslog?appname=bob&local=7` or `logger:stdout?json=true`
Defaults to `logger:stderr`.
 
* `constantLabels`
Labels to set in all metrics. A list of `label=value` pairs, separated by commas.
### Environment Variables
 
The following environment variables configure the exporter:
Loading
Loading
@@ -74,21 +81,44 @@ The following environment variables configure the exporter:
URI may contain the username and password to connect with.
 
* `DATA_SOURCE_URI`
an alternative to DATA_SOURCE_NAME which exclusively accepts the raw URI
an alternative to `DATA_SOURCE_NAME` which exclusively accepts the raw URI
without a username and password component.
 
* `DATA_SOURCE_USER`
When using `DATA_SOURCE_URI`, this environment variable is used to specify
the username.
* `DATA_SOURCE_USER_FILE`
The same, but reads the username from a file.
 
* `DATA_SOURCE_PASS`
When using `DATA_SOURCE_URI`, this environment variable is used to specify
the password to connect with.
* `DATA_SOURCE_PASS_FILE`
The same as above but reads the password from a file.
 
* `PG_EXPORTER_WEB_LISTEN_ADDRESS`
Address to listen on for web interface and telemetry. Default is `:9187`.
* `PG_EXPORTER_WEB_TELEMETRY_PATH`
Path under which to expose metrics. Default is `/metrics`.
* `PG_EXPORTER_DISABLE_DEFAULT_METRICS`
Use only metrics supplied from `queries.yaml`. Value can be `true` or `false`. Default is `false`.
* `PG_EXPORTER_DISABLE_SETTINGS_METRICS`
Use the flag if you don't want to scrape `pg_settings`. Value can be `true` or `false`. Default is `false`.
* `PG_EXPORTER_EXTEND_QUERY_PATH`
Path to a YAML file containing custom queries to run. Check out [`queries.yaml`](queries.yaml)
for examples of the format.
* `PG_EXPORTER_CONSTANT_LABELS`
Labels to set in all metrics. A list of `label=value` pairs, separated by commas.
Settings set by environment variables starting with `PG_` will be overwritten by the corresponding CLI flag if given.
### Setting the Postgres server's data source name
 
The PostgreSQL server's [data source name](http://en.wikipedia.org/wiki/Data_source_name)
Loading
Loading
@@ -98,6 +128,10 @@ For running it locally on a default Debian/Ubuntu install, this will work (trans
 
sudo -u postgres DATA_SOURCE_NAME="user=postgres host=/var/run/postgresql/ sslmode=disable" postgres_exporter
 
Also, you can set a list of sources to scrape different instances from a single exporter setup. Just define a comma-separated string.
sudo -u postgres DATA_SOURCE_NAME="port=5432,port=6432" postgres_exporter
See the [github.com/lib/pq](http://github.com/lib/pq) module for other ways to format the connection string.
 
### Adding new metrics
Loading
Loading
@@ -121,21 +155,27 @@ The -extend.query-path command-line argument specifies a YAML file containing ad
Some examples are provided in [queries.yaml](queries.yaml).
 
### Disabling default metrics
To work with non-officially-supported postgres versions you can try disabling (e.g. 8.2.15)
To work with non-officially-supported postgres versions you can try disabling (e.g. 8.2.15)
or a variant of postgres (e.g. Greenplum) you can disable the default metrics with the `--disable-default-metrics`
flag. This removes all built-in metrics, and uses only metrics defined by queries in the `queries.yaml` file you supply
(so you must supply one, otherwise the exporter will return nothing but internal statuses and not your database).
 
### Running as non-superuser
 
To be able to collect metrics from pg_stat_activity and pg_stat_replication as non-superuser you have to create views as a superuser, and assign permissions separately to those. In PostgreSQL, views run with the permissions of the user that created them so they can act as security barriers.
To be able to collect metrics from `pg_stat_activity` and `pg_stat_replication`
as non-superuser you have to create views as a superuser, and assign permissions
separately to those.
In PostgreSQL, views run with the permissions of the user that created them so
they can act as security barriers.
 
```sql
CREATE USER postgres_exporter PASSWORD 'password';
ALTER USER postgres_exporter SET SEARCH_PATH TO postgres_exporter,pg_catalog;
 
-- If deploying as non-superuser (for example in AWS RDS)
-- GRANT postgres_exporter TO :MASTER_USER;
-- If deploying as non-superuser (for example in AWS RDS), uncomment the GRANT
-- line below and replace <MASTER_USER> with your root user.
-- GRANT postgres_exporter TO <MASTER_USER>
CREATE SCHEMA postgres_exporter AUTHORIZATION postgres_exporter;
 
CREATE VIEW postgres_exporter.pg_stat_activity
Loading
Loading
package main
 
import (
"database/sql"
"errors"
"fmt"
"math"
"strconv"
Loading
Loading
@@ -13,8 +11,8 @@ import (
)
 
// Query the pg_settings view containing runtime variables
func querySettings(ch chan<- prometheus.Metric, db *sql.DB) error {
log.Debugln("Querying pg_setting view")
func querySettings(ch chan<- prometheus.Metric, server *Server) error {
log.Debugf("Querying pg_setting view on %q", server)
 
// pg_settings docs: https://www.postgresql.org/docs/current/static/view-pg-settings.html
//
Loading
Loading
@@ -22,9 +20,9 @@ func querySettings(ch chan<- prometheus.Metric, db *sql.DB) error {
// types in normaliseUnit() below
query := "SELECT name, setting, COALESCE(unit, ''), short_desc, vartype FROM pg_settings WHERE vartype IN ('bool', 'integer', 'real');"
 
rows, err := db.Query(query)
rows, err := server.db.Query(query)
if err != nil {
return errors.New(fmt.Sprintln("Error running query on database: ", namespace, err))
return fmt.Errorf("Error running query on database %q: %s %v", server, namespace, err)
}
defer rows.Close() // nolint: errcheck
 
Loading
Loading
@@ -32,10 +30,10 @@ func querySettings(ch chan<- prometheus.Metric, db *sql.DB) error {
s := &pgSetting{}
err = rows.Scan(&s.name, &s.setting, &s.unit, &s.shortDesc, &s.vartype)
if err != nil {
return errors.New(fmt.Sprintln("Error retrieving rows:", namespace, err))
return fmt.Errorf("Error retrieving rows on %q: %s %v", server, namespace, err)
}
 
ch <- s.metric()
ch <- s.metric(server.labels)
}
 
return nil
Loading
Loading
@@ -47,7 +45,7 @@ type pgSetting struct {
name, setting, unit, shortDesc, vartype string
}
 
func (s *pgSetting) metric() prometheus.Metric {
func (s *pgSetting) metric(labels prometheus.Labels) prometheus.Metric {
var (
err error
name = strings.Replace(s.name, ".", "_", -1)
Loading
Loading
@@ -78,7 +76,7 @@ func (s *pgSetting) metric() prometheus.Metric {
panic(fmt.Sprintf("Unsupported vartype %q", s.vartype))
}
 
desc := newDesc(subsystem, name, shortDesc)
desc := newDesc(subsystem, name, shortDesc, labels)
return prometheus.MustNewConstMetric(desc, prometheus.GaugeValue, val)
}
 
Loading
Loading
@@ -96,7 +94,7 @@ func (s *pgSetting) normaliseUnit() (val float64, unit string, err error) {
return
case "ms", "s", "min", "h", "d":
unit = "seconds"
case "kB", "MB", "GB", "TB", "8kB", "16kB", "32kB", "16MB":
case "B", "kB", "MB", "GB", "TB", "8kB", "16kB", "32kB", "16MB", "32MB", "64MB":
unit = "bytes"
default:
err = fmt.Errorf("Unknown unit for runtime variable: %q", s.unit)
Loading
Loading
@@ -133,6 +131,10 @@ func (s *pgSetting) normaliseUnit() (val float64, unit string, err error) {
val *= math.Pow(2, 15)
case "16MB":
val *= math.Pow(2, 24)
case "32MB":
val *= math.Pow(2, 25)
case "64MB":
val *= math.Pow(2, 26)
}
 
return
Loading
Loading
Loading
Loading
@@ -3,6 +3,7 @@
package main
 
import (
"github.com/prometheus/client_golang/prometheus"
dto "github.com/prometheus/client_model/go"
. "gopkg.in/check.v1"
)
Loading
Loading
@@ -25,7 +26,7 @@ var fixtures = []fixture{
unit: "seconds",
err: "",
},
d: "Desc{fqName: \"pg_settings_seconds_fixture_metric_seconds\", help: \"Foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_seconds_fixture_metric_seconds", help: "Foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: 5,
},
{
Loading
Loading
@@ -41,7 +42,7 @@ var fixtures = []fixture{
unit: "seconds",
err: "",
},
d: "Desc{fqName: \"pg_settings_milliseconds_fixture_metric_seconds\", help: \"Foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_milliseconds_fixture_metric_seconds", help: "Foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: 5,
},
{
Loading
Loading
@@ -57,7 +58,7 @@ var fixtures = []fixture{
unit: "bytes",
err: "",
},
d: "Desc{fqName: \"pg_settings_eight_kb_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_eight_kb_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 139264,
},
{
Loading
Loading
@@ -73,7 +74,7 @@ var fixtures = []fixture{
unit: "bytes",
err: "",
},
d: "Desc{fqName: \"pg_settings_16_kb_real_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_16_kb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 49152,
},
{
Loading
Loading
@@ -89,9 +90,41 @@ var fixtures = []fixture{
unit: "bytes",
err: "",
},
d: "Desc{fqName: \"pg_settings_16_mb_real_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_16_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 5.0331648e+07,
},
{
p: pgSetting{
name: "32_mb_real_fixture_metric",
setting: "3.0",
unit: "32MB",
shortDesc: "Foo foo foo",
vartype: "real",
},
n: normalised{
val: 1.00663296e+08,
unit: "bytes",
err: "",
},
d: `Desc{fqName: "pg_settings_32_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 1.00663296e+08,
},
{
p: pgSetting{
name: "64_mb_real_fixture_metric",
setting: "3.0",
unit: "64MB",
shortDesc: "Foo foo foo",
vartype: "real",
},
n: normalised{
val: 2.01326592e+08,
unit: "bytes",
err: "",
},
d: `Desc{fqName: "pg_settings_64_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 2.01326592e+08,
},
{
p: pgSetting{
name: "bool_on_fixture_metric",
Loading
Loading
@@ -105,7 +138,7 @@ var fixtures = []fixture{
unit: "",
err: "",
},
d: "Desc{fqName: \"pg_settings_bool_on_fixture_metric\", help: \"Foo foo foo\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_bool_on_fixture_metric", help: "Foo foo foo", constLabels: {}, variableLabels: []}`,
v: 1,
},
{
Loading
Loading
@@ -121,7 +154,7 @@ var fixtures = []fixture{
unit: "",
err: "",
},
d: "Desc{fqName: \"pg_settings_bool_off_fixture_metric\", help: \"Foo foo foo\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_bool_off_fixture_metric", help: "Foo foo foo", constLabels: {}, variableLabels: []}`,
v: 0,
},
{
Loading
Loading
@@ -137,7 +170,7 @@ var fixtures = []fixture{
unit: "seconds",
err: "",
},
d: "Desc{fqName: \"pg_settings_special_minus_one_value_seconds\", help: \"foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_special_minus_one_value_seconds", help: "foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: -1,
},
{
Loading
Loading
@@ -153,7 +186,7 @@ var fixtures = []fixture{
unit: "",
err: "",
},
d: "Desc{fqName: \"pg_settings_rds_rds_superuser_reserved_connections\", help: \"Sets the number of connection slots reserved for rds_superusers.\", constLabels: {}, variableLabels: []}",
d: `Desc{fqName: "pg_settings_rds_rds_superuser_reserved_connections", help: "Sets the number of connection slots reserved for rds_superusers.", constLabels: {}, variableLabels: []}`,
v: 2,
},
{
Loading
Loading
@@ -201,7 +234,7 @@ func (s *PgSettingSuite) TestMetric(c *C) {
 
for _, f := range fixtures {
d := &dto.Metric{}
m := f.p.metric()
m := f.p.metric(prometheus.Labels{})
m.Write(d) // nolint: errcheck
 
c.Check(m.Desc().String(), Equals, f.d)
Loading
Loading
Loading
Loading
@@ -7,11 +7,11 @@ package main
 
import (
"os"
"strings"
"testing"
 
. "gopkg.in/check.v1"
 
"database/sql"
"fmt"
 
_ "github.com/lib/pq"
Loading
Loading
@@ -31,7 +31,7 @@ func (s *IntegrationSuite) SetUpSuite(c *C) {
dsn := os.Getenv("DATA_SOURCE_NAME")
c.Assert(dsn, Not(Equals), "")
 
exporter := NewExporter(dsn, false, "")
exporter := NewExporter(strings.Split(dsn, ","))
c.Assert(exporter, NotNil)
// Assign the exporter to the suite
s.e = exporter
Loading
Loading
@@ -48,29 +48,31 @@ func (s *IntegrationSuite) TestAllNamespacesReturnResults(c *C) {
}
}()
 
// Open a database connection
db, err := sql.Open("postgres", s.e.dsn)
c.Assert(db, NotNil)
c.Assert(err, IsNil)
defer db.Close()
for _, dsn := range s.e.dsn {
// Open a database connection
server, err := NewServer(dsn)
c.Assert(server, NotNil)
c.Assert(err, IsNil)
 
// Do a version update
err = s.e.checkMapVersions(ch, db)
c.Assert(err, IsNil)
// Do a version update
err = s.e.checkMapVersions(ch, server)
c.Assert(err, IsNil)
 
err = querySettings(ch, db)
if !c.Check(err, Equals, nil) {
fmt.Println("## ERRORS FOUND")
fmt.Println(err)
}
err = querySettings(ch, server)
if !c.Check(err, Equals, nil) {
fmt.Println("## ERRORS FOUND")
fmt.Println(err)
}
 
// This should never happen in our test cases.
errMap := queryNamespaceMappings(ch, db, s.e.metricMap, s.e.queryOverrides)
if !c.Check(len(errMap), Equals, 0) {
fmt.Println("## NAMESPACE ERRORS FOUND")
for namespace, err := range errMap {
fmt.Println(namespace, ":", err)
// This should never happen in our test cases.
errMap := queryNamespaceMappings(ch, server)
if !c.Check(len(errMap), Equals, 0) {
fmt.Println("## NAMESPACE ERRORS FOUND")
for namespace, err := range errMap {
fmt.Println(namespace, ":", err)
}
}
server.Close()
}
}
 
Loading
Loading
@@ -86,12 +88,12 @@ func (s *IntegrationSuite) TestInvalidDsnDoesntCrash(c *C) {
}()
 
// Send a bad DSN
exporter := NewExporter("invalid dsn", false, *queriesPath)
exporter := NewExporter([]string{"invalid dsn"})
c.Assert(exporter, NotNil)
exporter.scrape(ch)
 
// Send a DSN to a non-listening port.
exporter = NewExporter("postgresql://nothing:nothing@127.0.0.1:1/nothing", false, *queriesPath)
exporter = NewExporter([]string{"postgresql://nothing:nothing@127.0.0.1:1/nothing"})
c.Assert(exporter, NotNil)
exporter.scrape(ch)
}
Loading
Loading
@@ -109,7 +111,7 @@ func (s *IntegrationSuite) TestUnknownMetricParsingDoesntCrash(c *C) {
dsn := os.Getenv("DATA_SOURCE_NAME")
c.Assert(dsn, Not(Equals), "")
 
exporter := NewExporter(dsn, false, "")
exporter := NewExporter(strings.Split(dsn, ","))
c.Assert(exporter, NotNil)
 
// Convert the default maps into a list of empty maps.
Loading
Loading
Loading
Loading
@@ -3,6 +3,7 @@
package main
 
import (
"reflect"
"testing"
 
. "gopkg.in/check.v1"
Loading
Loading
@@ -10,6 +11,7 @@ import (
"os"
 
"github.com/blang/semver"
"github.com/prometheus/client_golang/prometheus"
)
 
// Hook up gocheck into the "go test" runner.
Loading
Loading
@@ -34,7 +36,7 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
 
{
// No metrics should be eliminated
resultMap := makeDescMap(semver.MustParse("0.0.1"), testMetricMap)
resultMap := makeDescMap(semver.MustParse("0.0.1"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
Loading
Loading
@@ -55,7 +57,7 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
testMetricMap["test_namespace"]["metric_which_discards"] = discardableMetric
 
// Discard metric should be discarded
resultMap := makeDescMap(semver.MustParse("0.0.1"), testMetricMap)
resultMap := makeDescMap(semver.MustParse("0.0.1"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
Loading
Loading
@@ -76,7 +78,7 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
testMetricMap["test_namespace"]["metric_which_discards"] = discardableMetric
 
// Discard metric should be discarded
resultMap := makeDescMap(semver.MustParse("0.0.2"), testMetricMap)
resultMap := makeDescMap(semver.MustParse("0.0.2"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
Loading
Loading
@@ -92,7 +94,6 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
 
// test read username and password from file
func (s *FunctionalSuite) TestEnvironmentSettingWithSecretsFiles(c *C) {
err := os.Setenv("DATA_SOURCE_USER_FILE", "./tests/username_file")
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_USER_FILE")
Loading
Loading
@@ -105,31 +106,35 @@ func (s *FunctionalSuite) TestEnvironmentSettingWithSecretsFiles(c *C) {
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_URI")
 
var expected = "postgresql://custom_username:custom_password@localhost:5432/?sslmode=disable"
var expected = "postgresql://custom_username$&+,%2F%3A;=%3F%40:custom_password$&+,%2F%3A;=%3F%40@localhost:5432/?sslmode=disable"
 
dsn := getDataSource()
if dsn != expected {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn, expected)
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != expected {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], expected)
}
}
 
// test read DATA_SOURCE_NAME from environment
func (s *FunctionalSuite) TestEnvironmentSettingWithDns(c *C) {
envDsn := "postgresql://user:password@localhost:5432/?sslmode=enabled"
err := os.Setenv("DATA_SOURCE_NAME", envDsn)
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_NAME")
 
dsn := getDataSource()
if dsn != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn, envDsn)
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], envDsn)
}
}
 
// test DATA_SOURCE_NAME is used even if username and password environment variables are set
func (s *FunctionalSuite) TestEnvironmentSettingWithDnsAndSecrets(c *C) {
envDsn := "postgresql://userDsn:passwordDsn@localhost:55432/?sslmode=disabled"
err := os.Setenv("DATA_SOURCE_NAME", envDsn)
c.Assert(err, IsNil)
Loading
Loading
@@ -143,9 +148,12 @@ func (s *FunctionalSuite) TestEnvironmentSettingWithDnsAndSecrets(c *C) {
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_PASS")
 
dsn := getDataSource()
if dsn != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn, envDsn)
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], envDsn)
}
}
 
Loading
Loading
@@ -177,6 +185,80 @@ func (s *FunctionalSuite) TestPostgresVersionParsing(c *C) {
}
}
 
// TestParseFingerprint checks that parseFingerprint reduces a DSN (URL or
// key=value form) to a host:port fingerprint, applying default host/port,
// and reports an error for unparseable input.
func (s *FunctionalSuite) TestParseFingerprint(c *C) {
	testCases := []struct {
		url         string
		fingerprint string
		err         string
	}{
		{
			url:         "postgresql://userDsn:passwordDsn@localhost:55432/?sslmode=disabled",
			fingerprint: "localhost:55432",
		},
		{
			// key=value DSN: host defaults to localhost.
			url:         "port=1234",
			fingerprint: "localhost:1234",
		},
		{
			// key=value DSN: port defaults to 5432.
			url:         "host=example",
			fingerprint: "example:5432",
		},
		{
			url: "xyz",
			err: "malformed dsn \"xyz\"",
		},
	}
	for _, tc := range testCases {
		fingerprint, err := parseFingerprint(tc.url)
		if tc.err != "" {
			// An error is expected: verify both presence and message.
			c.Assert(err, NotNil)
			c.Assert(err.Error(), Equals, tc.err)
		} else {
			c.Assert(err, IsNil)
		}
		c.Assert(fingerprint, Equals, tc.fingerprint)
	}
}
// TestParseConstLabels checks that parseConstLabels turns a comma-separated
// "label=value" string into prometheus.Labels, tolerating surrounding
// whitespace and dropping malformed (non key=value) entries.
func (s *FunctionalSuite) TestParseConstLabels(c *C) {
	type labelCase struct {
		input string
		want  prometheus.Labels
	}

	testCases := []labelCase{
		{input: "a=b", want: prometheus.Labels{"a": "b"}},
		// Empty input yields an empty (non-nil) label set.
		{input: "", want: prometheus.Labels{}},
		{input: "a=b, c=d", want: prometheus.Labels{"a": "b", "c": "d"}},
		// Entries without '=' are silently discarded.
		{input: "a=b, xyz", want: prometheus.Labels{"a": "b"}},
	}

	for _, tc := range testCases {
		got := parseConstLabels(tc.input)
		if !reflect.DeepEqual(got, tc.want) {
			c.Fatalf("labels not equal (%v -> %v)", got, tc.want)
		}
	}
}
func UnsetEnvironment(c *C, d string) {
err := os.Unsetenv(d)
c.Assert(err, IsNil)
Loading
Loading
Loading
Loading
@@ -2,6 +2,7 @@ FROM postgres:{{VERSION}}
MAINTAINER Daniel Dent (https://www.danieldent.com)
ENV PG_MAX_WAL_SENDERS 8
ENV PG_WAL_KEEP_SEGMENTS 8
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y inetutils-ping
COPY setup-replication.sh /docker-entrypoint-initdb.d/
COPY docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint-initdb.d/setup-replication.sh /docker-entrypoint.sh
Loading
Loading
@@ -36,7 +36,7 @@ if [ "$1" = 'postgres' ]; then
if [ "x$REPLICATE_FROM" == "x" ]; then
eval "gosu postgres initdb $POSTGRES_INITDB_ARGS"
else
until ping -c 1 -W 1 ${REPLICATE_FROM}
until /bin/ping -c 1 -W 1 ${REPLICATE_FROM}
do
echo "Waiting for master to ping..."
sleep 1s
Loading
Loading
@@ -88,7 +88,7 @@ if [ "$1" = 'postgres' ]; then
: ${POSTGRES_DB:=$POSTGRES_USER}
export POSTGRES_USER POSTGRES_DB
 
psql=( psql -v ON_ERROR_STOP=1 )
psql=( "psql" "-v" "ON_ERROR_STOP=1" )
 
if [ "$POSTGRES_DB" != 'postgres' ]; then
"${psql[@]}" --username postgres <<-EOSQL
Loading
Loading
Loading
Loading
@@ -23,7 +23,7 @@ echo "Test Binary: $test_binary" 1>&2
[ -z "$postgres_exporter" ] && echo "Missing exporter binary" && exit 1
[ -z "$test_binary" ] && echo "Missing test binary" && exit 1
 
cd $DIR
cd "$DIR" || exit 1
 
VERSIONS=( \
9.1 \
Loading
Loading
@@ -33,42 +33,47 @@ VERSIONS=( \
9.5 \
9.6 \
10 \
11 \
)
 
wait_for_postgres(){
local ip=$1
local port=$2
if [ -z $ip ]; then
if [ -z "$ip" ]; then
echo "No IP specified." 1>&2
exit 1
fi
if [ -z $port ]; then
if [ -z "$port" ]; then
echo "No port specified." 1>&2
exit 1
fi
local wait_start=$(date +%s)
local wait_start
wait_start=$(date +%s) || exit 1
echo "Waiting for postgres to start listening..."
while ! pg_isready --host=$ip --port=$port &> /dev/null; do
if [ $(( $(date +%s) - $wait_start )) -gt $TIMEOUT ]; then
while ! pg_isready --host="$ip" --port="$port" &> /dev/null; do
if [ $(( $(date +%s) - wait_start )) -gt "$TIMEOUT" ]; then
echo "Timed out waiting for postgres to start!" 1>&2
exit 1
fi
sleep 1
done
echo "Postgres is online at $ip:$port"
}
 
wait_for_exporter() {
local wait_start=$(date +%s)
local wait_start
wait_start=$(date +%s) || exit 1
echo "Waiting for exporter to start..."
while ! nc -z localhost $exporter_port ; do
if [ $(( $(date +%s) - $wait_start )) -gt $TIMEOUT ]; then
while ! nc -z localhost "$exporter_port" ; do
if [ $(( $(date +%s) - wait_start )) -gt "$TIMEOUT" ]; then
echo "Timed out waiting for exporter!" 1>&2
exit 1
fi
sleep 1
done
echo "Exporter is online at localhost:$exporter_port"
}
 
smoketest_postgres() {
Loading
Loading
@@ -87,23 +92,24 @@ smoketest_postgres() {
CONTAINER_NAME=$($docker_cmd)
standalone_ip=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $CONTAINER_NAME)
# shellcheck disable=SC2064
trap "docker logs $CONTAINER_NAME ; docker kill $CONTAINER_NAME ; docker rm -v $CONTAINER_NAME; exit 1" EXIT INT TERM
wait_for_postgres $standalone_ip 5432
wait_for_postgres "$standalone_ip" 5432
 
# Run the test binary.
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$standalone_ip:5432/?sslmode=disable" $test_binary || exit $?
 
# Extract a raw metric list.
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$standalone_ip:5432/?sslmode=disable" $postgres_exporter --log.level=debug --web.listen-address=:$exporter_port &
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$standalone_ip:5432/?sslmode=disable" $postgres_exporter \
--log.level=debug --web.listen-address=:$exporter_port &
exporter_pid=$!
# shellcheck disable=SC2064
trap "docker logs $CONTAINER_NAME ; docker kill $CONTAINER_NAME ; docker rm -v $CONTAINER_NAME; kill $exporter_pid; exit 1" EXIT INT TERM
wait_for_exporter
 
# Dump the metrics to a file.
wget -q -O - http://localhost:$exporter_port/metrics 1> $METRICS_DIR/.metrics.single.$version.prom
if [ "$?" != "0" ]; then
echo "Failed on postgres $version ($DOCKER_IMAGE)" 1>&2
if ! wget -q -O - http://localhost:$exporter_port/metrics 1> "$METRICS_DIR/.metrics.single.$version.prom" ; then
echo "Failed on postgres $version (standalone $DOCKER_IMAGE)" 1>&2
kill $exporter_pid
exit 1
fi
Loading
Loading
@@ -116,44 +122,44 @@ smoketest_postgres() {
fi
 
kill $exporter_pid
docker kill $CONTAINER_NAME
docker rm -v $CONTAINER_NAME
docker kill "$CONTAINER_NAME"
docker rm -v "$CONTAINER_NAME"
trap - EXIT INT TERM
echo "#######################"
echo "Replicated Postgres $version"
echo "#######################"
old_pwd=$(pwd)
cd docker-postgres-replication
cd docker-postgres-replication || exit 1
VERSION=$version p2 -t Dockerfile.p2 -o Dockerfile
if [ "$?" != "0" ]; then
if ! VERSION="$version" p2 -t Dockerfile.p2 -o Dockerfile ; then
echo "Templating failed" 1>&2
exit 1
fi
trap "docker-compose logs; docker-compose down ; docker-compose rm -v; exit 1" EXIT INT TERM
local compose_cmd="POSTGRES_PASSWORD=$POSTGRES_PASSWORD docker-compose up -d --force-recreate --build"
echo "Compose Cmd: $compose_cmd"
eval $compose_cmd
eval "$compose_cmd"
master_container=$(docker-compose ps -q pg-master)
slave_container=$(docker-compose ps -q pg-slave)
master_ip=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $master_container)
slave_ip=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $slave_container)
master_ip=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$master_container")
slave_ip=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$slave_container")
echo "Got master IP: $master_ip"
wait_for_postgres $master_ip 5432
wait_for_postgres $slave_ip 5432
wait_for_postgres "$master_ip" 5432
wait_for_postgres "$slave_ip" 5432
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$master_ip:5432/?sslmode=disable" $test_binary || exit $?
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$master_ip:5432/?sslmode=disable" $postgres_exporter --log.level=debug --web.listen-address=:$exporter_port &
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$master_ip:5432/?sslmode=disable" $postgres_exporter \
--log.level=debug --web.listen-address=:$exporter_port &
exporter_pid=$!
# shellcheck disable=SC2064
trap "docker-compose logs; docker-compose down ; docker-compose rm -v ; kill $exporter_pid; exit 1" EXIT INT TERM
wait_for_exporter
 
wget -q -O - http://localhost:$exporter_port/metrics 1> $METRICS_DIR/.metrics.replicated.$version.prom
if [ "$?" != "0" ]; then
echo "Failed on postgres $version ($DOCKER_IMAGE)" 1>&2
if ! wget -q -O - http://localhost:$exporter_port/metrics 1> "$METRICS_DIR/.metrics.replicated.$version.prom" ; then
echo "Failed on postgres $version (replicated $DOCKER_IMAGE)" 1>&2
exit 1
fi
 
Loading
Loading
@@ -162,15 +168,15 @@ smoketest_postgres() {
docker-compose rm -v
trap - EXIT INT TERM
cd $old_pwd
cd "$old_pwd" || exit 1
}
 
# Start pulling the docker images in advance
for version in ${VERSIONS[@]}; do
docker pull postgres:$version > /dev/null &
for version in "${VERSIONS[@]}"; do
docker pull "postgres:$version" > /dev/null &
done
 
for version in ${VERSIONS[@]}; do
for version in "${VERSIONS[@]}"; do
echo "Testing postgres version $version"
smoketest_postgres $version
smoketest_postgres "$version"
done
custom_username
custom_username$&+,/:;=?@
custom_password
custom_password$&+,/:;=?@
#!/bin/bash
# Clone the assets branch (GIT_ASSETS_BRANCH) of this repository into a local
# working directory, authenticating against GitHub with GIT_API_KEY.
[ -n "$GIT_ASSETS_BRANCH" ] || exit 1

# Configure a git identity for commits made from CI.
setup_git() {
    git config --global user.email "travis@travis-ci.org" || exit 1
    git config --global user.name "Travis CI" || exit 1
}

# Constants
ASSETS_DIR=".assets-branch"

# Clone only the assets branch, embedding the API key in the remote URL for
# credentials.
git clone --single-branch -b "$GIT_ASSETS_BRANCH" \
    "https://${GIT_API_KEY}@github.com/${TRAVIS_REPO_SLUG}.git" "$ASSETS_DIR" || exit 1
#!/bin/bash
# Copy the freshly generated unique-metric lists into the assets-branch
# checkout and push them, so metric output changes are tracked per build.
# Requires GIT_ASSETS_BRANCH and GIT_API_KEY (the branch must already be
# cloned into .assets-branch, e.g. by gh-assets-clone.sh).
[ -n "$GIT_ASSETS_BRANCH" ] || exit 1
[ -n "$GIT_API_KEY" ] || exit 1

version=$(git describe HEAD) || exit 1

# Constants
ASSETS_DIR=".assets-branch"
METRICS_DIR="$ASSETS_DIR/metriclists"

# Ensure metrics dir exists
mkdir -p "$METRICS_DIR/"

# Remove old files so we spot deletions.
# BUGFIX: the glob must be OUTSIDE the quotes — quoting the whole pattern
# made rm look for a file literally named ".*.unique", so stale lists were
# never removed.
rm -f "$METRICS_DIR"/.*.unique

# Copy new files
cp -f -t "$METRICS_DIR/" ./.metrics.*.prom.unique || exit 1

# Enter the assets dir and push.
cd "$ASSETS_DIR" || exit 1

git add "metriclists" || exit 1
git commit -m "Added unique metrics for build from $version" || exit 1
git push origin "$GIT_ASSETS_BRANCH" || exit 1

exit 0
\ No newline at end of file
Loading
Loading
@@ -101,6 +101,7 @@ var platforms []Platform = []Platform{
{"darwin", "386", ""},
{"windows", "amd64", ".exe"},
{"windows", "386", ".exe"},
{"freebsd", "amd64", ""},
}
 
// productName can be overridden by environ product name
Loading
Loading
#!/bin/bash
# Script to parse a text exposition format file into a unique list of metrics
# output by the exporter and then build lists of added/removed metrics.
# Usage: postgres-metrics-get-changes.sh <dir-containing-previous-unique-lists>

old_src="$1"
if [ ! -d "$old_src" ] ; then
    mkdir -p "$old_src"
fi

# Diff the previous unique metric list against the new one, writing
# .metrics.<type>.<pg_version>.removed and .added files.
#   $1 = type (e.g. single/replicated)
#   $2 = postgres version
#   $3 = path to the old unique list
#   $4 = path to the new unique list
function generate_add_removed() {
    local type="$1"
    local pg_version="$2"
    local old_version="$3"
    local new_version="$4"

    # First run: no previous list exists, so treat every metric as added.
    if [ ! -e "$old_version" ] ; then
        touch "$old_version"
    fi
    # comm requires sorted input; the unique lists are produced sorted below.
    comm -23 "$old_version" "$new_version" > ".metrics.${type}.${pg_version}.removed"
    comm -13 "$old_version" "$new_version" > ".metrics.${type}.${pg_version}.added"
}

# Iterate the glob directly instead of `$(echo .*.prom)`: the echo added
# nothing and re-split the result on whitespace.
for raw_prom in .*.prom ; do
    # If no .prom files exist the literal pattern comes through — skip it
    # rather than letting grep fail on a nonexistent file.
    [ -e "$raw_prom" ] || continue

    # Get the type and version from the file name
    # (.metrics.<type>.<pg_version>.prom).
    type=$(echo "$raw_prom" | cut -d'.' -f3)
    pg_version=$(echo "$raw_prom" | cut -d'.' -f4- | sed 's/\.prom$//g')

    unique_file="${raw_prom}.unique"
    old_unique_file="$old_src/$unique_file"

    # Strip comment lines and sample values, drop label sets, then sort and
    # deduplicate to leave one metric name per line.
    grep -v '#' "$raw_prom" | \
        rev | cut -d' ' -f2- | \
        rev | cut -d'{' -f1 | \
        sort | \
        uniq > "$unique_file"

    generate_add_removed "$type" "$pg_version" "$old_unique_file" "$unique_file"
done
#!/bin/bash
# Script to determine added and removed metrics.
# Not currently used in CI but useful for inspecting complicated changes.
# Usage: postgres-metrics-diff.sh <type> <version> <old-unique-list> <new-unique-list>

type="$1"
version="$2"
old_version="$3"
new_version="$4"

# Require all arguments up front: previously missing/empty arguments were
# expanded unquoted into comm, producing confusing errors or bogus output
# file names.
if [ -z "$type" ] || [ -z "$version" ] || [ -z "$old_version" ] || [ -z "$new_version" ] ; then
    echo "Usage: $0 <type> <version> <old-unique-list> <new-unique-list>" 1>&2
    exit 1
fi

# comm expects both inputs to be sorted.
# Lines only in the old list => removed; lines only in the new list => added.
comm -23 "$old_version" "$new_version" > ".metrics.${type}.${version}.removed"
comm -13 "$old_version" "$new_version" > ".metrics.${type}.${version}.added"