Import prometheus-postgres-exporter_0.8.0+ds.orig.tar.gz

[dgit import orig prometheus-postgres-exporter_0.8.0+ds.orig.tar.gz]
This commit is contained in:
Martina Ferrari 2020-01-20 13:25:39 +01:00
commit a18fe63c06
34 changed files with 5744 additions and 0 deletions

2
.dockerignore Normal file

@@ -0,0 +1,2 @@
*
!bin/

21
.gitignore vendored Normal file

@@ -0,0 +1,21 @@
/.build
/postgres_exporter
/postgres_exporter_integration_test
*.tar.gz
*.test
*-stamp
/.idea
/.vscode
*.iml
/cover.out
/cover.*.out
/.coverage
/bin
/release
/*.prom
/.metrics.*.*.prom
/.metrics.*.*.prom.unique
/.assets-branch
/.metrics.*.added
/.metrics.*.removed
/tools/src

46
.travis.yml Normal file

@@ -0,0 +1,46 @@
sudo: required
services:
- docker
language: go
go:
- '1.11'
before_install:
- go get -v github.com/mattn/goveralls
- sudo wget -O /usr/local/bin/p2 https://github.com/wrouesnel/p2cli/releases/download/r4/p2
&& sudo chmod +x /usr/local/bin/p2
- sudo wget -O /usr/local/bin/docker-compose https://github.com/docker/compose/releases/download/1.9.0-rc4/docker-compose-Linux-x86_64
&& sudo chmod +x /usr/local/bin/docker-compose
- sudo apt-get update && sudo apt-get install postgresql-client-common
script:
- "./gh-assets-clone.sh"
- go run mage.go -v all
- "$HOME/gopath/bin/goveralls -coverprofile=cover.out -service=travis-ci"
- go run mage.go docker
after_success:
- docker login -u $DOCKER_USER -p $DOCKER_PASS
- if [ ! -z "$TRAVIS_TAG" ]; then docker tag wrouesnel/postgres_exporter:latest wrouesnel/postgres_exporter:$TRAVIS_TAG
; docker push wrouesnel/postgres_exporter:$TRAVIS_TAG ; fi
- if [ "$TRAVIS_BRANCH" == "master" ]; then docker push wrouesnel/postgres_exporter
; fi
- "./postgres-metrics-get-changes.sh .assets-branch/metriclists"
- if [ "$TRAVIS_BRANCH" == "master" ]; then ./gh-metrics-push.sh ; fi
env:
global:
- DOCKER_USER=wrouesnel
- GIT_ASSETS_BRANCH=assets
- secure: sl1d85bipYhHlHTZ4fwkWrZ07px+lPMQrKPaiyQ9i5tylQAcMqwDroK0pb5HIyIl6PEx72D5atQWnEqluA/0rFt3SxqxtvT+wj6CPmmZfh2fUSol7I07QzAsi95d7q0fg2mStDdfs134Uu+JjxGKEGRu2SL3Zq+LKpaNPtIZVBqrCYYAySLiEJx+DEOfwt1ktn/qHapV5d5FYdfd7trfV411NITyA8AGk6Gy0HztRDGbfcoLOsM+CnVi1p59uUL9ck/hL2DbsB44qDKeWQaruMLwWNDETu+EVwHlDEHGBPb+wdDALnW+Ts3CAUpuGXftHV35XLLbH7NXOnS6QiH938ycfPf3INY51lV7cL6bNtFWDKMAIcPf4wQO2ts4qFhuiUeFdo7qrC6uEI5Fy/sELBgWl4O2opVY3Tf8s8OO/DSb4Cxy6solKgaETkl6EcShaEj7H/Cn7vT0+SLKCpSQlvVQXDLGg6eZTyBA+OWNElE0UvWV7znxWBlke+9NARIl4FcB/SY4A6v1ztpandHWMjNLLxZyVxFEswfU9hvf0qL9SW38OJ5cIK8pvmH2QWG7Xg/j0B3o7SHMdsM+pcSwrzsM6OENgvxPNBb/DinmMyQKxTCVcVmMo7uIS89RIylvN79E8U6NagdFkiLfa3xEHq8zCzEkHi3bsLRvytgT2X0=
- secure: 1JL8KcgkLueQ2DLL81UMYzIHX3qm1TjcO40QL2ZOfdirFWBshOiTwTXeWj5qZaGBzoVV5ezhyZaBY+t3/pObslm20ERce879hEw+TSnKN30wfBqNyv2r7rfsbKkXauultb8RNu9y/9XS0DCEyGdSTQh9UaCa4z6ulu39hffDddrGQjwW1P2gT3Npu1cDYd1iSO36rrA6yXjaoN8OW8U4znKVjOGnarxxFnXJkiYv2PfIrZA6BpL3d0syJtWDyr1G+B48oK9VK+fBV9K0G0E67fJvqB3ANXN3D41il3S+cs8Ulcd7hF+LWxpMsP2r1/XHYSDw3Iiz0QFKKzoyxNdipvdjAVDxrWylyLnmTBYzXk41kRv88mKVLBQM1dbzsLXYcsE2pgIZxxq9OHGZ5CUJ8t0oz5D9oXMUy4QOMQ36jZdvD048aB7DGp4EF2J7ILIhUZrHHErOlXotnsYvNMvamNwqB5Jg4NC+y5QHxERJ+HK5oPrLy+iCb2kmWatSB6vO5OeX/F7IRiqtZghJRddEeMdQ1a6H0GeV1BF7Hx8j3TPMJ66qSAb0RA1lQQCN4l+/YMEWmQD8amf1O5NY116waf+Co4qkvt3c4QctQOMwu3Ra7uLlp6GG61OmHhPTCGSv/LZp6CVtROLY5IltKv7qBzksjvXkO1SzhJOxi0JkZmg=
branches:
except:
- assets
deploy:
skip_cleanup: true
provider: releases
api_key:
secure: rwlge/Rs3wnWyfKRhD9fd5GviVe0foYUp20DY3AjKdDjhtwScA1EeR9QHOkB3raze52en0+KkpqlLCWbt3q4CRT7+ku1DNKhd6VWALdTZ1RPJYvNlU6CKJdRnWUJsECmSBsShXlbiYR8axqNVedzFPFGKzS9gYlFN6rr7pez/JZhxqucopZ6I+TkRHMELrFXyQK7/Y2bNRCLC4a+rGsjKeLLtYXbRXCmS0G4BSJEBRk7d69fIRzBApCMfrcLftgHzPuPth616yyUusQSCQYvaZ5tlwrPP8/E0wG3SVJVeDCMuDOSBZ9M6vNzR8W8VR/hxQamegn1OQgC5kNOaLZCTcJ5xguRouqb+FNFBqrd/Zi6vESo7RiVLULawzwxkh9sIPa3WZYDb3VK/Z/cpggUeR7wAu0S5ZYEvJHRefIZpqofZEHzDE3Blqp5yErz05e/zmjpd6HHK3f/UHmRRYfbulkvGT3aL/dlq5GcFvuxVC/vTL2VPvg9cGbqtf7PakC5IhoHpDs35tOyLxifOBLHvkwtGSxEfsCohIG8Hz2XFD83EsxgOiKSXVPLNd6yxjdqZj7OeAKFFU3bzGndnRbDIXaf987IN1imgUtP6wegfImoRStqxN4gEwwIMFsZCF86Ug4eLhlajLbWhudriDxDPBM/F9950aVxLwmWh9l5cRI=
file_glob: true
file: release/*
on:
tags: true
branch: master
repo: wrouesnel/postgres_exporter

15
Dockerfile Normal file

@@ -0,0 +1,15 @@
FROM debian:7.11-slim
RUN useradd -u 20001 postgres_exporter
FROM scratch
COPY --from=0 /etc/passwd /etc/passwd
USER postgres_exporter
ARG binary
COPY $binary /postgres_exporter
EXPOSE 9187
ENTRYPOINT [ "/postgres_exporter" ]

13
LICENSE Normal file

@@ -0,0 +1,13 @@
Copyright 2018 William Rouesnel
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

270
README.md Normal file

@@ -0,0 +1,270 @@
[![Build Status](https://travis-ci.org/wrouesnel/postgres_exporter.svg?branch=master)](https://travis-ci.org/wrouesnel/postgres_exporter)
[![Coverage Status](https://coveralls.io/repos/github/wrouesnel/postgres_exporter/badge.svg?branch=master)](https://coveralls.io/github/wrouesnel/postgres_exporter?branch=master)
[![Go Report Card](https://goreportcard.com/badge/github.com/wrouesnel/postgres_exporter)](https://goreportcard.com/report/github.com/wrouesnel/postgres_exporter)
[![Docker Pulls](https://img.shields.io/docker/pulls/wrouesnel/postgres_exporter.svg)](https://hub.docker.com/r/wrouesnel/postgres_exporter/tags)
# PostgreSQL Server Exporter
Prometheus exporter for PostgreSQL server metrics.
CI Tested PostgreSQL versions: `9.4`, `9.5`, `9.6`, `10`, `11`
## Quick Start
This package is available for Docker:
```
# Start an example database
docker run --net=host -it --rm -e POSTGRES_PASSWORD=password postgres
# Connect to it
docker run --net=host -e DATA_SOURCE_NAME="postgresql://postgres:password@localhost:5432/postgres?sslmode=disable" wrouesnel/postgres_exporter
```
## Building and running
The build system is based on [Mage](https://magefile.org).
The default build behavior is to produce the binary:
```
$ go get github.com/wrouesnel/postgres_exporter
$ cd ${GOPATH-$HOME/go}/src/github.com/wrouesnel/postgres_exporter
$ go run mage.go binary
$ export DATA_SOURCE_NAME="postgresql://login:password@hostname:port/dbname"
$ ./postgres_exporter <flags>
```
To build the dockerfile, run `go run mage.go docker`.
This will build the docker image as `wrouesnel/postgres_exporter:latest`. This
is a minimal docker image containing *just* postgres_exporter. By default no SSL
certificates are included; if you need to use SSL you should either bind-mount
`/etc/ssl/certs/ca-certificates.crt` or derive a new image containing them.
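For example, to bind-mount the host's CA bundle into the container, something like the following should work (a sketch; adjust the certificate path for your distribution):
```
docker run --net=host \
  -v /etc/ssl/certs/ca-certificates.crt:/etc/ssl/certs/ca-certificates.crt:ro \
  -e DATA_SOURCE_NAME="postgresql://postgres:password@localhost:5432/postgres?sslmode=verify-full" \
  wrouesnel/postgres_exporter
```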
### Vendoring
Package vendoring is handled with [`govendor`](https://github.com/kardianos/govendor)
### Flags
* `help`
Show context-sensitive help (also try --help-long and --help-man).
* `web.listen-address`
Address to listen on for web interface and telemetry. Default is `:9187`.
* `web.telemetry-path`
Path under which to expose metrics. Default is `/metrics`.
* `disable-default-metrics`
Use only metrics supplied from `queries.yaml` via `--extend.query-path`.
* `disable-settings-metrics`
Use the flag if you don't want to scrape `pg_settings`.
* `auto-discover-databases`
Whether to discover the databases on a server dynamically.
* `extend.query-path`
Path to a YAML file containing custom queries to run. Check out [`queries.yaml`](queries.yaml)
for examples of the format.
* `dumpmaps`
Do not run the exporter; instead, print the internal representation of the metric maps. Useful when debugging a custom
queries file.
* `constantLabels`
Labels to set in all metrics. A list of `label=value` pairs, separated by commas.
* `version`
Show application version.
* `exclude-databases`
A list of databases to remove when autoDiscoverDatabases is enabled.
* `log.level`
Set logging level: one of `debug`, `info`, `warn`, `error`, `fatal`
* `log.format`
Set the log output target and format. e.g. `logger:syslog?appname=bob&local=7` or `logger:stdout?json=true`
Defaults to `logger:stderr`.
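As an illustrative example, several of these flags can be combined in a single invocation (all values here are made up):
```
DATA_SOURCE_NAME="postgresql://login:password@hostname:5432/dbname" \
  ./postgres_exporter \
  --web.listen-address=":9187" \
  --extend.query-path="queries.yaml" \
  --constantLabels="environment=staging,region=eu-west-1" \
  --log.level=debug
```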
### Environment Variables
The following environment variables configure the exporter:
* `DATA_SOURCE_NAME`
the default legacy format. Accepts URI form and key=value form arguments. The
URI may contain the username and password to connect with.
* `DATA_SOURCE_URI`
an alternative to `DATA_SOURCE_NAME` which exclusively accepts the raw URI
without a username and password component.
* `DATA_SOURCE_URI_FILE`
The same as above but reads the URI from a file.
* `DATA_SOURCE_USER`
When using `DATA_SOURCE_URI`, this environment variable is used to specify
the username.
* `DATA_SOURCE_USER_FILE`
The same, but reads the username from a file.
* `DATA_SOURCE_PASS`
When using `DATA_SOURCE_URI`, this environment variable is used to specify
the password to connect with.
* `DATA_SOURCE_PASS_FILE`
The same as above but reads the password from a file.
* `PG_EXPORTER_WEB_LISTEN_ADDRESS`
Address to listen on for web interface and telemetry. Default is `:9187`.
* `PG_EXPORTER_WEB_TELEMETRY_PATH`
Path under which to expose metrics. Default is `/metrics`.
* `PG_EXPORTER_DISABLE_DEFAULT_METRICS`
Use only metrics supplied from `queries.yaml`. Value can be `true` or `false`. Default is `false`.
* `PG_EXPORTER_DISABLE_SETTINGS_METRICS`
Use the flag if you don't want to scrape `pg_settings`. Value can be `true` or `false`. Default is `false`.
* `PG_EXPORTER_AUTO_DISCOVER_DATABASES`
Whether to discover the databases on a server dynamically. Value can be `true` or `false`. Default is `false`.
* `PG_EXPORTER_EXTEND_QUERY_PATH`
Path to a YAML file containing custom queries to run. Check out [`queries.yaml`](queries.yaml)
for examples of the format.
* `PG_EXPORTER_CONSTANT_LABELS`
Labels to set in all metrics. A list of `label=value` pairs, separated by commas.
* `PG_EXPORTER_EXCLUDE_DATABASES`
A comma-separated list of databases to remove when autoDiscoverDatabases is enabled. Default is empty string.
Settings set by environment variables starting with `PG_` will be overwritten by the corresponding CLI flag if given.
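As a sketch, the split URI/credential variables can be used to keep secrets in files rather than in the environment (the file paths below are illustrative, e.g. Docker secrets):
```
export DATA_SOURCE_URI="localhost:5432/postgres?sslmode=disable"
export DATA_SOURCE_USER_FILE="/run/secrets/pg_user"
export DATA_SOURCE_PASS_FILE="/run/secrets/pg_pass"
./postgres_exporter
```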
### Setting the Postgres server's data source name
The PostgreSQL server's [data source name](http://en.wikipedia.org/wiki/Data_source_name)
must be set via the `DATA_SOURCE_NAME` environment variable.
For running it locally on a default Debian/Ubuntu install, this will work (transpose to init script as appropriate):
sudo -u postgres DATA_SOURCE_NAME="user=postgres host=/var/run/postgresql/ sslmode=disable" postgres_exporter
Also, you can set a list of sources to scrape different instances from a single exporter setup. Just define a comma-separated string:
sudo -u postgres DATA_SOURCE_NAME="port=5432,port=6432" postgres_exporter
See the [github.com/lib/pq](http://github.com/lib/pq) module for other ways to format the connection string.
### Adding new metrics
The exporter will attempt to dynamically export additional metrics if they are added in the
future, but they will be marked as "untyped". Additional metric maps can be easily created
from Postgres documentation by copying the tables and using the following Python snippet:
```python
from StringIO import StringIO  # Python 2 (the snippet uses Python 2 print); on Python 3, use: from io import StringIO

x = """tab separated raw text of a documentation table"""
for l in StringIO(x):
    column, ctype, description = l.split('\t')
    print """"{0}" : {{ prometheus.CounterValue, prometheus.NewDesc("pg_stat_database_{0}", "{2}", nil, nil) }}, """.format(column.strip(), ctype, description.strip())
```
Adjust the value of the resultant prometheus value type appropriately. This helps build
rich self-documenting metrics for the exporter.
### Adding new metrics via a config file
The `--extend.query-path` command-line argument specifies a YAML file containing additional queries to run.
Some examples are provided in [queries.yaml](queries.yaml).
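A minimal sketch of the expected file format, modeled on the bundled examples (the metric namespace and query below are illustrative):
```yaml
pg_database_size:
  query: "SELECT datname, pg_database_size(datname) AS size_bytes FROM pg_database"
  metrics:
    - datname:
        usage: "LABEL"
        description: "Database name"
    - size_bytes:
        usage: "GAUGE"
        description: "Database size in bytes"
```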
### Disabling default metrics
To work with non-officially-supported postgres versions (e.g. 8.2.15), or with variants of postgres (e.g. Greenplum),
you can disable the default metrics with the `--disable-default-metrics`
flag. This removes all built-in metrics, and uses only metrics defined by queries in the `queries.yaml` file you supply
(so you must supply one, otherwise the exporter will return nothing but internal statuses and not your database).
### Automatically discover databases
To scrape metrics from all databases on a database server, the database DSNs can be dynamically discovered via the
`--auto-discover-databases` flag. When true, `SELECT datname FROM pg_database WHERE datallowconn = true AND datistemplate = false and datname != current_database()` is run for all configured DSNs. From the
result a new set of DSNs is created for which the metrics are scraped.
In addition, the option `--exclude-databases` makes it possible to filter the results of the auto-discovery and discard databases you do not need.
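For example (a sketch; the DSN and the excluded database are illustrative):
```
DATA_SOURCE_NAME="postgresql://postgres:password@localhost:5432/postgres?sslmode=disable" \
  ./postgres_exporter --auto-discover-databases --exclude-databases=rdsadmin
```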
### Running as non-superuser
To be able to collect metrics from `pg_stat_activity` and `pg_stat_replication`
as non-superuser you have to create functions and views as a superuser, and
assign permissions separately to those.
In PostgreSQL, views run with the permissions of the user that created them so
they can act as security barriers. Functions need to be created to share this
data with the non-superuser. Only creating the views will leave out the most
important bits of data.
```sql
-- To use IF statements, hence to be able to check if the user exists before
-- attempting creation, we need to switch to procedural SQL (PL/pgSQL)
-- instead of standard SQL.
-- More: https://www.postgresql.org/docs/9.3/plpgsql-overview.html
-- To preserve compatibility with <9.0, DO blocks are not used; instead,
-- a function is created and dropped.
CREATE OR REPLACE FUNCTION __tmp_create_user() returns void as $$
BEGIN
IF NOT EXISTS (
SELECT -- SELECT list can stay empty for this
FROM pg_catalog.pg_user
WHERE usename = 'postgres_exporter') THEN
CREATE USER postgres_exporter;
END IF;
END;
$$ language plpgsql;
SELECT __tmp_create_user();
DROP FUNCTION __tmp_create_user();
ALTER USER postgres_exporter WITH PASSWORD 'password';
ALTER USER postgres_exporter SET SEARCH_PATH TO postgres_exporter,pg_catalog;
-- If deploying as non-superuser (for example in AWS RDS), uncomment the GRANT
-- line below and replace <MASTER_USER> with your root user.
-- GRANT postgres_exporter TO <MASTER_USER>;
CREATE SCHEMA IF NOT EXISTS postgres_exporter;
GRANT USAGE ON SCHEMA postgres_exporter TO postgres_exporter;
GRANT CONNECT ON DATABASE postgres TO postgres_exporter;
CREATE OR REPLACE FUNCTION get_pg_stat_activity() RETURNS SETOF pg_stat_activity AS
$$ SELECT * FROM pg_catalog.pg_stat_activity; $$
LANGUAGE sql
VOLATILE
SECURITY DEFINER;
CREATE OR REPLACE VIEW postgres_exporter.pg_stat_activity
AS
SELECT * from get_pg_stat_activity();
GRANT SELECT ON postgres_exporter.pg_stat_activity TO postgres_exporter;
CREATE OR REPLACE FUNCTION get_pg_stat_replication() RETURNS SETOF pg_stat_replication AS
$$ SELECT * FROM pg_catalog.pg_stat_replication; $$
LANGUAGE sql
VOLATILE
SECURITY DEFINER;
CREATE OR REPLACE VIEW postgres_exporter.pg_stat_replication
AS
SELECT * FROM get_pg_stat_replication();
GRANT SELECT ON postgres_exporter.pg_stat_replication TO postgres_exporter;
```
> **NOTE**
> <br />Remember to use the `postgres` database name in the connection string:
> ```
> DATA_SOURCE_NAME=postgresql://postgres_exporter:password@localhost:5432/postgres?sslmode=disable
> ```
# Hacking
* To build a copy for your current architecture run `go run mage.go binary`.
This will create a symlink to the just built binary in the root directory.
* To build release tarballs run `go run mage.go release`.
* The build system has been a bit temperamental since the conversion to Mage; I am working on making it
a perfect out-of-the-box experience, but am time-constrained on it at the moment.


@@ -0,0 +1,141 @@
package main
import (
"fmt"
"math"
"strconv"
"strings"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/log"
)
// Query the pg_settings view containing runtime variables
func querySettings(ch chan<- prometheus.Metric, server *Server) error {
log.Debugf("Querying pg_setting view on %q", server)
// pg_settings docs: https://www.postgresql.org/docs/current/static/view-pg-settings.html
//
// NOTE: If you add more vartypes here, you must update the supported
// types in normaliseUnit() below
query := "SELECT name, setting, COALESCE(unit, ''), short_desc, vartype FROM pg_settings WHERE vartype IN ('bool', 'integer', 'real');"
rows, err := server.db.Query(query)
if err != nil {
return fmt.Errorf("Error running query on database %q: %s %v", server, namespace, err)
}
defer rows.Close() // nolint: errcheck
for rows.Next() {
s := &pgSetting{}
err = rows.Scan(&s.name, &s.setting, &s.unit, &s.shortDesc, &s.vartype)
if err != nil {
return fmt.Errorf("Error retrieving rows on %q: %s %v", server, namespace, err)
}
ch <- s.metric(server.labels)
}
return nil
}
// pgSetting represents a PostgreSQL runtime variable as returned by the
// pg_settings view.
type pgSetting struct {
name, setting, unit, shortDesc, vartype string
}
func (s *pgSetting) metric(labels prometheus.Labels) prometheus.Metric {
var (
err error
name = strings.Replace(s.name, ".", "_", -1)
unit = s.unit // nolint: ineffassign
shortDesc = s.shortDesc
subsystem = "settings"
val float64
)
switch s.vartype {
case "bool":
if s.setting == "on" {
val = 1
}
case "integer", "real":
if val, unit, err = s.normaliseUnit(); err != nil {
// Panic, since we should recognise all units
// and don't want to silently exclude metrics
panic(err)
}
if len(unit) > 0 {
name = fmt.Sprintf("%s_%s", name, unit)
shortDesc = fmt.Sprintf("%s [Units converted to %s.]", shortDesc, unit)
}
default:
// Panic because we got a type we didn't ask for
panic(fmt.Sprintf("Unsupported vartype %q", s.vartype))
}
desc := newDesc(subsystem, name, shortDesc, labels)
return prometheus.MustNewConstMetric(desc, prometheus.GaugeValue, val)
}
// TODO: fix linter override
// nolint: nakedret
func (s *pgSetting) normaliseUnit() (val float64, unit string, err error) {
val, err = strconv.ParseFloat(s.setting, 64)
if err != nil {
return val, unit, fmt.Errorf("Error converting setting %q value %q to float: %s", s.name, s.setting, err)
}
// Units defined in: https://www.postgresql.org/docs/current/static/config-setting.html
switch s.unit {
case "":
return
case "ms", "s", "min", "h", "d":
unit = "seconds"
case "B", "kB", "MB", "GB", "TB", "8kB", "16kB", "32kB", "16MB", "32MB", "64MB":
unit = "bytes"
default:
err = fmt.Errorf("Unknown unit for runtime variable: %q", s.unit)
return
}
// -1 is special, don't modify the value
if val == -1 {
return
}
switch s.unit {
case "ms":
val /= 1000
case "min":
val *= 60
case "h":
val *= 60 * 60
case "d":
val *= 60 * 60 * 24
case "kB":
val *= math.Pow(2, 10)
case "MB":
val *= math.Pow(2, 20)
case "GB":
val *= math.Pow(2, 30)
case "TB":
val *= math.Pow(2, 40)
case "8kB":
val *= math.Pow(2, 13)
case "16kB":
val *= math.Pow(2, 14)
case "32kB":
val *= math.Pow(2, 15)
case "16MB":
val *= math.Pow(2, 24)
case "32MB":
val *= math.Pow(2, 25)
case "64MB":
val *= math.Pow(2, 26)
}
return
}


@@ -0,0 +1,256 @@
// +build !integration
package main
import (
"github.com/prometheus/client_golang/prometheus"
dto "github.com/prometheus/client_model/go"
. "gopkg.in/check.v1"
)
type PgSettingSuite struct{}
var _ = Suite(&PgSettingSuite{})
var fixtures = []fixture{
{
p: pgSetting{
name: "seconds_fixture_metric",
setting: "5",
unit: "s",
shortDesc: "Foo foo foo",
vartype: "integer",
},
n: normalised{
val: 5,
unit: "seconds",
err: "",
},
d: `Desc{fqName: "pg_settings_seconds_fixture_metric_seconds", help: "Foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: 5,
},
{
p: pgSetting{
name: "milliseconds_fixture_metric",
setting: "5000",
unit: "ms",
shortDesc: "Foo foo foo",
vartype: "integer",
},
n: normalised{
val: 5,
unit: "seconds",
err: "",
},
d: `Desc{fqName: "pg_settings_milliseconds_fixture_metric_seconds", help: "Foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: 5,
},
{
p: pgSetting{
name: "eight_kb_fixture_metric",
setting: "17",
unit: "8kB",
shortDesc: "Foo foo foo",
vartype: "integer",
},
n: normalised{
val: 139264,
unit: "bytes",
err: "",
},
d: `Desc{fqName: "pg_settings_eight_kb_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 139264,
},
{
p: pgSetting{
name: "16_kb_real_fixture_metric",
setting: "3.0",
unit: "16kB",
shortDesc: "Foo foo foo",
vartype: "real",
},
n: normalised{
val: 49152,
unit: "bytes",
err: "",
},
d: `Desc{fqName: "pg_settings_16_kb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 49152,
},
{
p: pgSetting{
name: "16_mb_real_fixture_metric",
setting: "3.0",
unit: "16MB",
shortDesc: "Foo foo foo",
vartype: "real",
},
n: normalised{
val: 5.0331648e+07,
unit: "bytes",
err: "",
},
d: `Desc{fqName: "pg_settings_16_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 5.0331648e+07,
},
{
p: pgSetting{
name: "32_mb_real_fixture_metric",
setting: "3.0",
unit: "32MB",
shortDesc: "Foo foo foo",
vartype: "real",
},
n: normalised{
val: 1.00663296e+08,
unit: "bytes",
err: "",
},
d: `Desc{fqName: "pg_settings_32_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 1.00663296e+08,
},
{
p: pgSetting{
name: "64_mb_real_fixture_metric",
setting: "3.0",
unit: "64MB",
shortDesc: "Foo foo foo",
vartype: "real",
},
n: normalised{
val: 2.01326592e+08,
unit: "bytes",
err: "",
},
d: `Desc{fqName: "pg_settings_64_mb_real_fixture_metric_bytes", help: "Foo foo foo [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
v: 2.01326592e+08,
},
{
p: pgSetting{
name: "bool_on_fixture_metric",
setting: "on",
unit: "",
shortDesc: "Foo foo foo",
vartype: "bool",
},
n: normalised{
val: 1,
unit: "",
err: "",
},
d: `Desc{fqName: "pg_settings_bool_on_fixture_metric", help: "Foo foo foo", constLabels: {}, variableLabels: []}`,
v: 1,
},
{
p: pgSetting{
name: "bool_off_fixture_metric",
setting: "off",
unit: "",
shortDesc: "Foo foo foo",
vartype: "bool",
},
n: normalised{
val: 0,
unit: "",
err: "",
},
d: `Desc{fqName: "pg_settings_bool_off_fixture_metric", help: "Foo foo foo", constLabels: {}, variableLabels: []}`,
v: 0,
},
{
p: pgSetting{
name: "special_minus_one_value",
setting: "-1",
unit: "d",
shortDesc: "foo foo foo",
vartype: "integer",
},
n: normalised{
val: -1,
unit: "seconds",
err: "",
},
d: `Desc{fqName: "pg_settings_special_minus_one_value_seconds", help: "foo foo foo [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
v: -1,
},
{
p: pgSetting{
name: "rds.rds_superuser_reserved_connections",
setting: "2",
unit: "",
shortDesc: "Sets the number of connection slots reserved for rds_superusers.",
vartype: "integer",
},
n: normalised{
val: 2,
unit: "",
err: "",
},
d: `Desc{fqName: "pg_settings_rds_rds_superuser_reserved_connections", help: "Sets the number of connection slots reserved for rds_superusers.", constLabels: {}, variableLabels: []}`,
v: 2,
},
{
p: pgSetting{
name: "unknown_unit",
setting: "10",
unit: "nonexistent",
shortDesc: "foo foo foo",
vartype: "integer",
},
n: normalised{
val: 10,
unit: "",
err: `Unknown unit for runtime variable: "nonexistent"`,
},
},
}
func (s *PgSettingSuite) TestNormaliseUnit(c *C) {
for _, f := range fixtures {
switch f.p.vartype {
case "integer", "real":
val, unit, err := f.p.normaliseUnit()
c.Check(val, Equals, f.n.val)
c.Check(unit, Equals, f.n.unit)
if err == nil {
c.Check("", Equals, f.n.err)
} else {
c.Check(err.Error(), Equals, f.n.err)
}
}
}
}
func (s *PgSettingSuite) TestMetric(c *C) {
defer func() {
if r := recover(); r != nil {
if r.(error).Error() != `Unknown unit for runtime variable: "nonexistent"` {
panic(r)
}
}
}()
for _, f := range fixtures {
d := &dto.Metric{}
m := f.p.metric(prometheus.Labels{})
m.Write(d) // nolint: errcheck
c.Check(m.Desc().String(), Equals, f.d)
c.Check(d.GetGauge().GetValue(), Equals, f.v)
}
}
type normalised struct {
val float64
unit string
err string
}
type fixture struct {
p pgSetting
n normalised
d string
v float64
}

File diff suppressed because it is too large.


@@ -0,0 +1,128 @@
// These are specialized integration tests. We only build them when we're doing
// a lot of additional work to keep the external docker environment they require
// working.
// +build integration
package main
import (
"fmt"
"os"
"strings"
"testing"
_ "github.com/lib/pq"
"github.com/prometheus/client_golang/prometheus"
. "gopkg.in/check.v1"
)
// Hook up gocheck into the "go test" runner.
func Test(t *testing.T) { TestingT(t) }
type IntegrationSuite struct {
e *Exporter
}
var _ = Suite(&IntegrationSuite{})
func (s *IntegrationSuite) SetUpSuite(c *C) {
dsn := os.Getenv("DATA_SOURCE_NAME")
c.Assert(dsn, Not(Equals), "")
exporter := NewExporter(strings.Split(dsn, ","))
c.Assert(exporter, NotNil)
// Assign the exporter to the suite
s.e = exporter
prometheus.MustRegister(exporter)
}
// TODO: it would be nice if this didn't mostly just recreate the scrape function
func (s *IntegrationSuite) TestAllNamespacesReturnResults(c *C) {
// Setup a dummy channel to consume metrics
ch := make(chan prometheus.Metric, 100)
go func() {
for range ch {
}
}()
for _, dsn := range s.e.dsn {
// Open a database connection
server, err := NewServer(dsn)
c.Assert(server, NotNil)
c.Assert(err, IsNil)
// Do a version update
err = s.e.checkMapVersions(ch, server)
c.Assert(err, IsNil)
err = querySettings(ch, server)
if !c.Check(err, Equals, nil) {
fmt.Println("## ERRORS FOUND")
fmt.Println(err)
}
// This should never happen in our test cases.
errMap := queryNamespaceMappings(ch, server)
if !c.Check(len(errMap), Equals, 0) {
fmt.Println("## NAMESPACE ERRORS FOUND")
for namespace, err := range errMap {
fmt.Println(namespace, ":", err)
}
}
server.Close()
}
}
// TestInvalidDsnDoesntCrash tests that specifying an invalid DSN doesn't crash
// the exporter. Related to https://github.com/wrouesnel/postgres_exporter/issues/93
// although not a replication of the scenario.
func (s *IntegrationSuite) TestInvalidDsnDoesntCrash(c *C) {
// Setup a dummy channel to consume metrics
ch := make(chan prometheus.Metric, 100)
go func() {
for range ch {
}
}()
// Send a bad DSN
exporter := NewExporter([]string{"invalid dsn"})
c.Assert(exporter, NotNil)
exporter.scrape(ch)
// Send a DSN to a non-listening port.
exporter = NewExporter([]string{"postgresql://nothing:nothing@127.0.0.1:1/nothing"})
c.Assert(exporter, NotNil)
exporter.scrape(ch)
}
// TestUnknownMetricParsingDoesntCrash deliberately deletes all the column maps out
// of an exporter to test that the default metric handling code can cope with unknown columns.
func (s *IntegrationSuite) TestUnknownMetricParsingDoesntCrash(c *C) {
// Setup a dummy channel to consume metrics
ch := make(chan prometheus.Metric, 100)
go func() {
for range ch {
}
}()
dsn := os.Getenv("DATA_SOURCE_NAME")
c.Assert(dsn, Not(Equals), "")
exporter := NewExporter(strings.Split(dsn, ","))
c.Assert(exporter, NotNil)
// Convert the default maps into a list of empty maps.
emptyMaps := make(map[string]intermediateMetricMap, 0)
for k := range exporter.builtinMetricMaps {
emptyMaps[k] = intermediateMetricMap{
map[string]ColumnMapping{},
true,
0,
}
}
exporter.builtinMetricMaps = emptyMaps
// scrape the exporter and make sure it works
exporter.scrape(ch)
}


@@ -0,0 +1,326 @@
// +build !integration
package main
import (
"io/ioutil"
"os"
"reflect"
"testing"
"github.com/blang/semver"
"github.com/prometheus/client_golang/prometheus"
. "gopkg.in/check.v1"
)
// Hook up gocheck into the "go test" runner.
func Test(t *testing.T) { TestingT(t) }
type FunctionalSuite struct {
}
var _ = Suite(&FunctionalSuite{})
func (s *FunctionalSuite) SetUpSuite(c *C) {
}
func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
testMetricMap := map[string]intermediateMetricMap{
"test_namespace": {
map[string]ColumnMapping{
"metric_which_stays": {COUNTER, "This metric should not be eliminated", nil, nil},
"metric_which_discards": {COUNTER, "This metric should be forced to DISCARD", nil, nil},
},
true,
0,
},
}
{
// No metrics should be eliminated
resultMap := makeDescMap(semver.MustParse("0.0.1"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
false,
)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_discards"].discard,
Equals,
false,
)
}
// nolint: dupl
{
// Update the map so the discard metric should be eliminated
discardableMetric := testMetricMap["test_namespace"].columnMappings["metric_which_discards"]
discardableMetric.supportedVersions = semver.MustParseRange(">0.0.1")
testMetricMap["test_namespace"].columnMappings["metric_which_discards"] = discardableMetric
// Discard metric should be discarded
resultMap := makeDescMap(semver.MustParse("0.0.1"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
false,
)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_discards"].discard,
Equals,
true,
)
}
// nolint: dupl
{
// Update the map so the discard metric should be kept but has a version
discardableMetric := testMetricMap["test_namespace"].columnMappings["metric_which_discards"]
discardableMetric.supportedVersions = semver.MustParseRange(">0.0.1")
testMetricMap["test_namespace"].columnMappings["metric_which_discards"] = discardableMetric
// Discard metric should be discarded
resultMap := makeDescMap(semver.MustParse("0.0.2"), prometheus.Labels{}, testMetricMap)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_stays"].discard,
Equals,
false,
)
c.Check(
resultMap["test_namespace"].columnMappings["metric_which_discards"].discard,
Equals,
false,
)
}
}
// test read username and password from file
func (s *FunctionalSuite) TestEnvironmentSettingWithSecretsFiles(c *C) {
err := os.Setenv("DATA_SOURCE_USER_FILE", "./tests/username_file")
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_USER_FILE")
err = os.Setenv("DATA_SOURCE_PASS_FILE", "./tests/userpass_file")
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_PASS_FILE")
err = os.Setenv("DATA_SOURCE_URI", "localhost:5432/?sslmode=disable")
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_URI")
var expected = "postgresql://custom_username$&+,%2F%3A;=%3F%40:custom_password$&+,%2F%3A;=%3F%40@localhost:5432/?sslmode=disable"
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != expected {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], expected)
}
}
// test read DATA_SOURCE_NAME from environment
func (s *FunctionalSuite) TestEnvironmentSettingWithDns(c *C) {
envDsn := "postgresql://user:password@localhost:5432/?sslmode=enabled"
err := os.Setenv("DATA_SOURCE_NAME", envDsn)
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_NAME")
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], envDsn)
}
}
// test DATA_SOURCE_NAME is used even if username and password environment variables are set
func (s *FunctionalSuite) TestEnvironmentSettingWithDnsAndSecrets(c *C) {
envDsn := "postgresql://userDsn:passwordDsn@localhost:55432/?sslmode=disabled"
err := os.Setenv("DATA_SOURCE_NAME", envDsn)
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_NAME")
err = os.Setenv("DATA_SOURCE_USER_FILE", "./tests/username_file")
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_USER_FILE")
err = os.Setenv("DATA_SOURCE_PASS", "envUserPass")
c.Assert(err, IsNil)
defer UnsetEnvironment(c, "DATA_SOURCE_PASS")
dsn := getDataSources()
if len(dsn) == 0 {
c.Errorf("Expected one data source, zero found")
}
if dsn[0] != envDsn {
c.Errorf("Expected Username to be read from file. Found=%v, expected=%v", dsn[0], envDsn)
}
}
func (s *FunctionalSuite) TestPostgresVersionParsing(c *C) {
type TestCase struct {
input string
expected string
}
cases := []TestCase{
{
input: "PostgreSQL 10.1 on x86_64-pc-linux-gnu, compiled by gcc (Debian 6.3.0-18) 6.3.0 20170516, 64-bit",
expected: "10.1.0",
},
{
input: "PostgreSQL 9.5.4, compiled by Visual C++ build 1800, 64-bit",
expected: "9.5.4",
},
{
input: "EnterpriseDB 9.6.5.10 on x86_64-pc-linux-gnu, compiled by gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16), 64-bit",
expected: "9.6.5",
},
}
for _, cs := range cases {
ver, err := parseVersion(cs.input)
c.Assert(err, IsNil)
c.Assert(ver.String(), Equals, cs.expected)
}
}
func (s *FunctionalSuite) TestParseFingerprint(c *C) {
cases := []struct {
url string
fingerprint string
err string
}{
{
url: "postgresql://userDsn:passwordDsn@localhost:55432/?sslmode=disabled",
fingerprint: "localhost:55432",
},
{
url: "postgresql://userDsn:passwordDsn%3D@localhost:55432/?sslmode=disabled",
fingerprint: "localhost:55432",
},
{
url: "port=1234",
fingerprint: "localhost:1234",
},
{
url: "host=example",
fingerprint: "example:5432",
},
{
url: "xyz",
err: "malformed dsn \"xyz\"",
},
}
for _, cs := range cases {
f, err := parseFingerprint(cs.url)
if cs.err == "" {
c.Assert(err, IsNil)
} else {
c.Assert(err, NotNil)
c.Assert(err.Error(), Equals, cs.err)
}
c.Assert(f, Equals, cs.fingerprint)
}
}
func (s *FunctionalSuite) TestParseConstLabels(c *C) {
cases := []struct {
s string
labels prometheus.Labels
}{
{
s: "a=b",
labels: prometheus.Labels{
"a": "b",
},
},
{
s: "",
labels: prometheus.Labels{},
},
{
s: "a=b, c=d",
labels: prometheus.Labels{
"a": "b",
"c": "d",
},
},
{
s: "a=b, xyz",
labels: prometheus.Labels{
"a": "b",
},
},
{
s: "a=",
labels: prometheus.Labels{},
},
}
for _, cs := range cases {
labels := parseConstLabels(cs.s)
if !reflect.DeepEqual(labels, cs.labels) {
c.Fatalf("labels not equal (%v -> %v)", labels, cs.labels)
}
}
}
func UnsetEnvironment(c *C, d string) {
err := os.Unsetenv(d)
c.Assert(err, IsNil)
}
// test boolean metric type gets converted to float
func (s *FunctionalSuite) TestBooleanConversionToValueAndString(c *C) {
type TestCase struct {
input interface{}
expectedString string
expectedValue float64
expectedOK bool
}
cases := []TestCase{
{
input: true,
expectedString: "true",
expectedValue: 1.0,
expectedOK: true,
},
{
input: false,
expectedString: "false",
expectedValue: 0.0,
expectedOK: true,
},
}
for _, cs := range cases {
value, ok := dbToFloat64(cs.input)
c.Assert(value, Equals, cs.expectedValue)
c.Assert(ok, Equals, cs.expectedOK)
str, ok := dbToString(cs.input)
c.Assert(str, Equals, cs.expectedString)
c.Assert(ok, Equals, cs.expectedOK)
}
}
func (s *FunctionalSuite) TestParseUserQueries(c *C) {
userQueriesData, err := ioutil.ReadFile("./tests/user_queries_ok.yaml")
if err == nil {
metricMaps, newQueryOverrides, err := parseUserQueries(userQueriesData)
c.Assert(err, Equals, nil)
c.Assert(metricMaps, NotNil)
c.Assert(newQueryOverrides, NotNil)
if len(metricMaps) != 2 {
c.Errorf("Expected 2 metrics from user file, got %d", len(metricMaps))
}
}
}


@@ -0,0 +1,8 @@
FROM postgres:11
MAINTAINER Daniel Dent (https://www.danieldent.com)
ENV PG_MAX_WAL_SENDERS 8
ENV PG_WAL_KEEP_SEGMENTS 8
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y inetutils-ping
COPY setup-replication.sh /docker-entrypoint-initdb.d/
COPY docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint-initdb.d/setup-replication.sh /docker-entrypoint.sh


@@ -0,0 +1,8 @@
FROM postgres:{{VERSION}}
MAINTAINER Daniel Dent (https://www.danieldent.com)
ENV PG_MAX_WAL_SENDERS 8
ENV PG_WAL_KEEP_SEGMENTS 8
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y inetutils-ping
COPY setup-replication.sh /docker-entrypoint-initdb.d/
COPY docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint-initdb.d/setup-replication.sh /docker-entrypoint.sh


@@ -0,0 +1,11 @@
# Replicated postgres cluster in docker.
Upstream is forked from https://github.com/DanielDent/docker-postgres-replication
My version lives at https://github.com/wrouesnel/docker-postgres-replication
This very simple docker-compose file lets us stand up a replicated postgres
cluster so we can test streaming.
# TODO:
Pull in p2 and template the Dockerfile so we can test multiple versions.


@@ -0,0 +1,32 @@
version: '2'
services:
pg-master:
build: '.'
image: 'danieldent/postgres-replication'
restart: 'always'
environment:
POSTGRES_USER: 'postgres'
POSTGRES_PASSWORD: 'postgres'
PGDATA: '/var/lib/postgresql/data/pgdata'
volumes:
- '/var/lib/postgresql/data'
expose:
- '5432'
pg-slave:
build: '.'
image: 'danieldent/postgres-replication'
restart: 'always'
environment:
POSTGRES_USER: 'postgres'
POSTGRES_PASSWORD: 'postgres'
PGDATA: '/var/lib/postgresql/data/pgdata'
REPLICATE_FROM: 'pg-master'
volumes:
- '/var/lib/postgresql/data'
expose:
- '5432'
links:
- 'pg-master'


@@ -0,0 +1,140 @@
#!/bin/bash
# Backwards compatibility for old variable names (deprecated)
if [ "x$PGUSER" != "x" ]; then
POSTGRES_USER=$PGUSER
fi
if [ "x$PGPASSWORD" != "x" ]; then
POSTGRES_PASSWORD=$PGPASSWORD
fi
# Forwards-compatibility for old variable names (pg_basebackup uses them)
if [ "x$PGPASSWORD" = "x" ]; then
export PGPASSWORD=$POSTGRES_PASSWORD
fi
# Based on official postgres package's entrypoint script (https://hub.docker.com/_/postgres/)
# Modified to be able to set up a slave. The docker-entrypoint-initdb.d hook provided is inadequate.
set -e
if [ "${1:0:1}" = '-' ]; then
set -- postgres "$@"
fi
if [ "$1" = 'postgres' ]; then
mkdir -p "$PGDATA"
chmod 700 "$PGDATA"
chown -R postgres "$PGDATA"
mkdir -p /run/postgresql
chmod g+s /run/postgresql
chown -R postgres /run/postgresql
# look specifically for PG_VERSION, as it is expected in the DB dir
if [ ! -s "$PGDATA/PG_VERSION" ]; then
if [ "x$REPLICATE_FROM" == "x" ]; then
eval "gosu postgres initdb $POSTGRES_INITDB_ARGS"
else
until /bin/ping -c 1 -W 1 ${REPLICATE_FROM}
do
echo "Waiting for master to ping..."
sleep 1s
done
until gosu postgres pg_basebackup -h ${REPLICATE_FROM} -D ${PGDATA} -U ${POSTGRES_USER} -vP -w
do
echo "Waiting for master to connect..."
sleep 1s
done
fi
# check password first so we can output the warning before postgres
# messes it up
if [ ! -z "$POSTGRES_PASSWORD" ]; then
pass="PASSWORD '$POSTGRES_PASSWORD'"
authMethod=md5
else
# The - option suppresses leading tabs but *not* spaces. :)
cat >&2 <<-'EOWARN'
****************************************************
WARNING: No password has been set for the database.
This will allow anyone with access to the
Postgres port to access your database. In
Docker's default configuration, this is
effectively any other container on the same
system.
Use "-e POSTGRES_PASSWORD=password" to set
it in "docker run".
****************************************************
EOWARN
pass=
authMethod=trust
fi
if [ "x$REPLICATE_FROM" == "x" ]; then
{ echo; echo "host replication all 0.0.0.0/0 $authMethod"; } | gosu postgres tee -a "$PGDATA/pg_hba.conf" > /dev/null
{ echo; echo "host all all 0.0.0.0/0 $authMethod"; } | gosu postgres tee -a "$PGDATA/pg_hba.conf" > /dev/null
# internal start of server in order to allow set-up using psql-client
# does not listen on external TCP/IP and waits until start finishes
gosu postgres pg_ctl -D "$PGDATA" \
-o "-c listen_addresses='localhost'" \
-w start
: ${POSTGRES_USER:=postgres}
: ${POSTGRES_DB:=$POSTGRES_USER}
export POSTGRES_USER POSTGRES_DB
psql=( "psql" "-v" "ON_ERROR_STOP=1" )
if [ "$POSTGRES_DB" != 'postgres' ]; then
"${psql[@]}" --username postgres <<-EOSQL
CREATE DATABASE "$POSTGRES_DB" ;
EOSQL
echo
fi
if [ "$POSTGRES_USER" = 'postgres' ]; then
op='ALTER'
else
op='CREATE'
fi
"${psql[@]}" --username postgres <<-EOSQL
$op USER "$POSTGRES_USER" WITH SUPERUSER $pass ;
EOSQL
echo
fi
psql+=( --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" )
echo
for f in /docker-entrypoint-initdb.d/*; do
case "$f" in
*.sh) echo "$0: running $f"; . "$f" ;;
*.sql) echo "$0: running $f"; "${psql[@]}" < "$f"; echo ;;
*.sql.gz) echo "$0: running $f"; gunzip -c "$f" | "${psql[@]}"; echo ;;
*) echo "$0: ignoring $f" ;;
esac
echo
done
if [ "x$REPLICATE_FROM" == "x" ]; then
gosu postgres pg_ctl -D "$PGDATA" -m fast -w stop
fi
echo
echo 'PostgreSQL init process complete; ready for start up.'
echo
fi
# We need this health check so we know when it's started up.
touch /tmp/.postgres_init_complete
exec gosu postgres "$@"
fi
exec "$@"


@@ -0,0 +1,22 @@
#!/bin/bash
if [ "x$REPLICATE_FROM" == "x" ]; then
cat >> ${PGDATA}/postgresql.conf <<EOF
wal_level = hot_standby
max_wal_senders = $PG_MAX_WAL_SENDERS
wal_keep_segments = $PG_WAL_KEEP_SEGMENTS
hot_standby = on
EOF
else
cat > ${PGDATA}/recovery.conf <<EOF
standby_mode = on
primary_conninfo = 'host=${REPLICATE_FROM} port=5432 user=${POSTGRES_USER} password=${POSTGRES_PASSWORD}'
trigger_file = '/tmp/touch_me_to_promote_to_me_master'
EOF
chown postgres ${PGDATA}/recovery.conf
chmod 600 ${PGDATA}/recovery.conf
fi


@@ -0,0 +1,180 @@
#!/bin/bash -x
# Basic integration tests with postgres. Requires docker to work.
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
METRICS_DIR=$(pwd)
# Read the absolute path to the exporter
postgres_exporter="$1"
test_binary="$2"
export POSTGRES_PASSWORD=postgres
exporter_port=9187
echo "Exporter Binary: $postgres_exporter" 1>&2
echo "Test Binary: $test_binary" 1>&2
[ -z "$postgres_exporter" ] && echo "Missing exporter binary" && exit 1
[ -z "$test_binary" ] && echo "Missing test binary" && exit 1
cd "$DIR" || exit 1
VERSIONS=( \
9.4 \
9.5 \
9.6 \
10 \
11 \
)
wait_for_postgres(){
local container=$1
local ip=$2
local port=$3
if [ -z "$ip" ]; then
echo "No IP specified." 1>&2
exit 1
fi
if [ -z "$port" ]; then
echo "No port specified." 1>&2
exit 1
fi
local wait_start
wait_start=$(date +%s) || exit 1
echo "Waiting for postgres to start listening..."
while ! docker exec "$container" pg_isready --host="$ip" --port="$port" &> /dev/null; do
if [ $(( $(date +%s) - wait_start )) -gt "$TIMEOUT" ]; then
echo "Timed out waiting for postgres to start!" 1>&2
exit 1
fi
sleep 1
done
echo "Postgres is online at $ip:$port"
}
wait_for_exporter() {
local wait_start
wait_start=$(date +%s) || exit 1
echo "Waiting for exporter to start..."
while ! nc -z localhost "$exporter_port" ; do
if [ $(( $(date +%s) - wait_start )) -gt "$TIMEOUT" ]; then
echo "Timed out waiting for exporter!" 1>&2
exit 1
fi
sleep 1
done
echo "Exporter is online at localhost:$exporter_port"
}
smoketest_postgres() {
local version=$1
local CONTAINER_NAME=postgres_exporter-test-smoke
local TIMEOUT=30
local IMAGE_NAME=postgres
local CUR_IMAGE=$IMAGE_NAME:$version
echo "#######################"
echo "Standalone Postgres $version"
echo "#######################"
local docker_cmd="docker run -d -e POSTGRES_PASSWORD=$POSTGRES_PASSWORD $CUR_IMAGE"
echo "Docker Cmd: $docker_cmd"
CONTAINER_NAME=$($docker_cmd)
standalone_ip=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $CONTAINER_NAME)
# shellcheck disable=SC2064
trap "docker logs $CONTAINER_NAME ; docker kill $CONTAINER_NAME ; docker rm -v $CONTAINER_NAME; exit 1" EXIT INT TERM
wait_for_postgres "$CONTAINER_NAME" "$standalone_ip" 5432
# Run the test binary.
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$standalone_ip:5432/?sslmode=disable" $test_binary || exit $?
# Extract a raw metric list.
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$standalone_ip:5432/?sslmode=disable" $postgres_exporter \
--log.level=debug --web.listen-address=:$exporter_port &
exporter_pid=$!
# shellcheck disable=SC2064
trap "docker logs $CONTAINER_NAME ; docker kill $CONTAINER_NAME ; docker rm -v $CONTAINER_NAME; kill $exporter_pid; exit 1" EXIT INT TERM
wait_for_exporter
# Dump the metrics to a file.
if ! wget -q -O - http://localhost:$exporter_port/metrics 1> "$METRICS_DIR/.metrics.single.$version.prom" ; then
echo "Failed on postgres $version (standalone $DOCKER_IMAGE)" 1>&2
kill $exporter_pid
exit 1
fi
# HACK test: check pg_up is a 1 - TODO: expand integration tests to include metric consumption
if ! grep 'pg_up.* 1' $METRICS_DIR/.metrics.single.$version.prom ; then
echo "pg_up metric was not 1 despite exporter and database being up"
kill $exporter_pid
exit 1
fi
kill $exporter_pid
docker kill "$CONTAINER_NAME"
docker rm -v "$CONTAINER_NAME"
trap - EXIT INT TERM
echo "#######################"
echo "Replicated Postgres $version"
echo "#######################"
old_pwd=$(pwd)
cd docker-postgres-replication || exit 1
if ! VERSION="$version" p2 -t Dockerfile.p2 -o Dockerfile ; then
echo "Templating failed" 1>&2
exit 1
fi
trap "docker-compose logs; docker-compose down ; docker-compose rm -v; exit 1" EXIT INT TERM
local compose_cmd="POSTGRES_PASSWORD=$POSTGRES_PASSWORD docker-compose up -d --force-recreate --build"
echo "Compose Cmd: $compose_cmd"
eval "$compose_cmd"
master_container=$(docker-compose ps -q pg-master)
slave_container=$(docker-compose ps -q pg-slave)
master_ip=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$master_container")
slave_ip=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$slave_container")
echo "Got master IP: $master_ip"
wait_for_postgres "$master_container" "$master_ip" 5432
wait_for_postgres "$slave_container" "$slave_ip" 5432
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$master_ip:5432/?sslmode=disable" $test_binary || exit $?
DATA_SOURCE_NAME="postgresql://postgres:$POSTGRES_PASSWORD@$master_ip:5432/?sslmode=disable" $postgres_exporter \
--log.level=debug --web.listen-address=:$exporter_port &
exporter_pid=$!
# shellcheck disable=SC2064
trap "docker-compose logs; docker-compose down ; docker-compose rm -v ; kill $exporter_pid; exit 1" EXIT INT TERM
wait_for_exporter
if ! wget -q -O - http://localhost:$exporter_port/metrics 1> "$METRICS_DIR/.metrics.replicated.$version.prom" ; then
echo "Failed on postgres $version (replicated $DOCKER_IMAGE)" 1>&2
exit 1
fi
kill $exporter_pid
docker-compose down
docker-compose rm -v
trap - EXIT INT TERM
cd "$old_pwd" || exit 1
}
# Start pulling the docker images in advance
for version in "${VERSIONS[@]}"; do
docker pull "postgres:$version" > /dev/null &
done
for version in "${VERSIONS[@]}"; do
echo "Testing postgres version $version"
smoketest_postgres "$version"
done


@@ -0,0 +1,23 @@
pg_locks_mode:
query: "WITH q_locks AS (select * from pg_locks where pid != pg_backend_pid() and database = (select oid from pg_database where datname = current_database())) SELECT (select current_database()) as datname,
lockmodes AS tag_lockmode, coalesce((select count(*) FROM q_locks WHERE mode = lockmodes), 0) AS count FROM
unnest('{AccessShareLock, ExclusiveLock, RowShareLock, RowExclusiveLock, ShareLock, ShareRowExclusiveLock, AccessExclusiveLock, ShareUpdateExclusiveLock}'::text[]) lockmodes;"
metrics:
- datname:
usage: "LABEL"
description: "Database name"
- tag_lockmode:
usage: "LABEL"
description: "Lock type"
- count:
usage: "GAUGE"
description: "Number of lock"
pg_wal:
query: "select current_database() as datname, case when pg_is_in_recovery() = false then pg_xlog_location_diff(pg_current_xlog_location(), '0/0')::int8 else pg_xlog_location_diff(pg_last_xlog_replay_location(), '0/0')::int8 end as xlog_location_b;"
metrics:
- datname:
usage: "LABEL"
description: "Database name"
- xlog_location_b:
usage: "COUNTER"
description: "current transaction log write location"


@@ -0,0 +1 @@
custom_username$&+,/:;=?@


@@ -0,0 +1 @@
custom_password$&+,/:;=?@

57
example.alerts.yml Normal file

@@ -0,0 +1,57 @@
---
groups:
- name: PostgreSQL
rules:
- alert: PostgreSQLMaxConnectionsReached
expr: sum(pg_stat_activity_count) by (instance) > sum(pg_settings_max_connections) by (instance)
for: 1m
labels:
severity: email
annotations:
summary: "{{ $labels.instance }} has maxed out Postgres connections."
description: "{{ $labels.instance }} is exceeding the currently configured maximum Postgres connection limit (current value: {{ $value }}s). Services may be degraded - please take immediate action (you probably need to increase max_connections in the Docker image and re-deploy."
- alert: PostgreSQLHighConnections
expr: sum(pg_stat_activity_count) by (instance) > sum(pg_settings_max_connections * 0.8) by (instance)
for: 10m
labels:
severity: email
annotations:
summary: "{{ $labels.instance }} is over 80% of max Postgres connections."
description: "{{ $labels.instance }} is exceeding 80% of the currently configured maximum Postgres connection limit (current value: {{ $value }}s). Please check utilization graphs and confirm if this is normal service growth, abuse or an otherwise temporary condition or if new resources need to be provisioned (or the limits increased, which is mostly likely)."
- alert: PostgreSQLDown
expr: pg_up != 1
for: 1m
labels:
severity: email
annotations:
summary: "PostgreSQL is not processing queries: {{ $labels.instance }}"
description: "{{ $labels.instance }} is rejecting query requests from the exporter, and thus probably not allowing DNS requests to work either. User services should not be effected provided at least 1 node is still alive."
- alert: PostgreSQLSlowQueries
expr: avg(rate(pg_stat_activity_max_tx_duration{datname!~"template.*"}[2m])) by (datname) > 2 * 60
for: 2m
labels:
severity: email
annotations:
summary: "PostgreSQL high number of slow on {{ $labels.cluster }} for database {{ $labels.datname }} "
description: "PostgreSQL high number of slow queries {{ $labels.cluster }} for database {{ $labels.datname }} with a value of {{ $value }} "
- alert: PostgreSQLQPS
expr: avg(irate(pg_stat_database_xact_commit{datname!~"template.*"}[5m]) + irate(pg_stat_database_xact_rollback{datname!~"template.*"}[5m])) by (datname) > 10000
for: 5m
labels:
severity: email
annotations:
summary: "PostgreSQL high number of queries per second {{ $labels.cluster }} for database {{ $labels.datname }}"
description: "PostgreSQL high number of queries per second on {{ $labels.cluster }} for database {{ $labels.datname }} with a value of {{ $value }}"
- alert: PostgreSQLCacheHitRatio
expr: avg(rate(pg_stat_database_blks_hit{datname!~"template.*"}[5m]) / (rate(pg_stat_database_blks_hit{datname!~"template.*"}[5m]) + rate(pg_stat_database_blks_read{datname!~"template.*"}[5m]))) by (datname) < 0.98
for: 5m
labels:
severity: email
annotations:
summary: "PostgreSQL low cache hit rate on {{ $labels.cluster }} for database {{ $labels.datname }}"
description: "PostgreSQL low on cache hit rate on {{ $labels.cluster }} for database {{ $labels.datname }} with a value of {{ $value }}"

18
gh-assets-clone.sh Executable file

@@ -0,0 +1,18 @@
#!/bin/bash
# Script to setup the assets clone of the repository using GIT_ASSETS_BRANCH and
# GIT_API_KEY.
[ ! -z "$GIT_ASSETS_BRANCH" ] || exit 1
setup_git() {
git config --global user.email "travis@travis-ci.org" || exit 1
git config --global user.name "Travis CI" || exit 1
}
# Constants
ASSETS_DIR=".assets-branch"
# Clone the assets branch with the correct credentials
git clone --single-branch -b "$GIT_ASSETS_BRANCH" \
"https://${GIT_API_KEY}@github.com/${TRAVIS_REPO_SLUG}.git" "$ASSETS_DIR" || exit 1

29
gh-metrics-push.sh Executable file

@@ -0,0 +1,29 @@
#!/bin/bash
# Script to copy and push new metric versions to the assets branch.
[ ! -z "$GIT_ASSETS_BRANCH" ] || exit 1
[ ! -z "$GIT_API_KEY" ] || exit 1
version=$(git describe HEAD) || exit 1
# Constants
ASSETS_DIR=".assets-branch"
METRICS_DIR="$ASSETS_DIR/metriclists"
# Ensure metrics dir exists
mkdir -p "$METRICS_DIR/"
# Remove old files so we spot deletions
rm -f "$METRICS_DIR/.*.unique"
# Copy new files
cp -f -t "$METRICS_DIR/" ./.metrics.*.prom.unique || exit 1
# Enter the assets dir and push.
cd "$ASSETS_DIR" || exit 1
git add "metriclists" || exit 1
git commit -m "Added unique metrics for build from $version" || exit 1
git push origin "$GIT_ASSETS_BRANCH" || exit 1
exit 0

11
mage.go Normal file

@@ -0,0 +1,11 @@
// +build ignore
package main
import (
"os"
"github.com/magefile/mage/mage"
)
func main() { os.Exit(mage.Main()) }

786
magefile.go Normal file

@@ -0,0 +1,786 @@
// +build mage
// Self-contained go-project magefile.
// nolint: deadcode
package main
import (
"fmt"
"io/ioutil"
"os"
"path"
"path/filepath"
"regexp"
"runtime"
"strings"
"time"
"github.com/magefile/mage/mg"
"github.com/magefile/mage/sh"
"github.com/magefile/mage/target"
"errors"
"math/bits"
"strconv"
"github.com/mholt/archiver"
)
var curDir = func() string {
name, _ := os.Getwd()
return name
}()
const constCoverageDir = ".coverage"
const constToolDir = "tools"
const constBinDir = "bin"
const constReleaseDir = "release"
const constCmdDir = "cmd"
const constCoverFile = "cover.out"
const constAssets = "assets"
const constAssetsGenerated = "assets/generated"
var coverageDir = mustStr(filepath.Abs(path.Join(curDir, constCoverageDir)))
var toolDir = mustStr(filepath.Abs(path.Join(curDir, constToolDir)))
var binDir = mustStr(filepath.Abs(path.Join(curDir, constBinDir)))
var releaseDir = mustStr(filepath.Abs(path.Join(curDir, constReleaseDir)))
var cmdDir = mustStr(filepath.Abs(path.Join(curDir, constCmdDir)))
var assetsGenerated = mustStr(filepath.Abs(path.Join(curDir, constAssetsGenerated)))
// Calculate file paths
var toolsGoPath = toolDir
var toolsSrcDir = mustStr(filepath.Abs(path.Join(toolDir, "src")))
var toolsBinDir = mustStr(filepath.Abs(path.Join(toolDir, "bin")))
var toolsVendorDir = mustStr(filepath.Abs(path.Join(toolDir, "vendor")))
var outputDirs = []string{binDir, releaseDir, toolsGoPath, toolsBinDir,
toolsVendorDir, assetsGenerated, coverageDir}
var toolsEnv = map[string]string{"GOPATH": toolsGoPath}
var containerName = func() string {
if name := os.Getenv("CONTAINER_NAME"); name != "" {
return name
}
return "wrouesnel/postgres_exporter:latest"
}()
type Platform struct {
OS string
Arch string
BinSuffix string
}
func (p *Platform) String() string {
return fmt.Sprintf("%s-%s", p.OS, p.Arch)
}
func (p *Platform) PlatformDir() string {
platformDir := path.Join(binDir, fmt.Sprintf("%s_%s_%s", productName, versionShort, p.String()))
return platformDir
}
func (p *Platform) PlatformBin(cmd string) string {
platformBin := fmt.Sprintf("%s%s", cmd, p.BinSuffix)
return path.Join(p.PlatformDir(), platformBin)
}
func (p *Platform) ArchiveDir() string {
return fmt.Sprintf("%s_%s_%s", productName, versionShort, p.String())
}
func (p *Platform) ReleaseBase() string {
return path.Join(releaseDir, fmt.Sprintf("%s_%s_%s", productName, versionShort, p.String()))
}
// Supported platforms
var platforms []Platform = []Platform{
{"linux", "amd64", ""},
{"linux", "386", ""},
{"darwin", "amd64", ""},
{"darwin", "386", ""},
{"windows", "amd64", ".exe"},
{"windows", "386", ".exe"},
{"freebsd", "amd64", ""},
}
// productName can be overridden by environ product name
var productName = func() string {
if name := os.Getenv("PRODUCT_NAME"); name != "" {
return name
}
name, _ := os.Getwd()
return path.Base(name)
}()
// Source files
var goSrc []string
var goDirs []string
var goPkgs []string
var goCmds []string
var branch = func() string {
if v := os.Getenv("BRANCH"); v != "" {
return v
}
out, _ := sh.Output("git", "rev-parse", "--abbrev-ref", "HEAD")
return out
}()
var buildDate = func() string {
if v := os.Getenv("BUILDDATE"); v != "" {
return v
}
return time.Now().Format("2006-01-02T15:04:05-0700")
}()
var revision = func() string {
if v := os.Getenv("REVISION"); v != "" {
return v
}
out, _ := sh.Output("git", "rev-parse", "HEAD")
return out
}()
var version = func() string {
if v := os.Getenv("VERSION"); v != "" {
return v
}
out, _ := sh.Output("git", "describe", "--dirty")
if out == "" {
return "v0.0.0"
}
return out
}()
var versionShort = func() string {
if v := os.Getenv("VERSION_SHORT"); v != "" {
return v
}
out, _ := sh.Output("git", "describe", "--abbrev=0")
if out == "" {
return "v0.0.0"
}
return out
}()
var concurrency = func() int {
if v := os.Getenv("CONCURRENCY"); v != "" {
pv, err := strconv.ParseUint(v, 10, bits.UintSize)
if err != nil {
panic(err)
}
return int(pv)
}
return runtime.NumCPU()
}()
var linterDeadline = func() time.Duration {
if v := os.Getenv("LINTER_DEADLINE"); v != "" {
d, _ := time.ParseDuration(v)
if d != 0 {
return d
}
}
return time.Second * 60
}()
func Log(args ...interface{}) {
if mg.Verbose() {
fmt.Println(args...)
}
}
func init() {
// Set environment
os.Setenv("PATH", fmt.Sprintf("%s:%s", toolsBinDir, os.Getenv("PATH")))
Log("Build PATH: ", os.Getenv("PATH"))
Log("Concurrency:", concurrency)
goSrc = func() []string {
results := new([]string)
filepath.Walk(".", func(relpath string, info os.FileInfo, err error) error {
// Ensure absolute path so globs work
path, err := filepath.Abs(relpath)
if err != nil {
panic(err)
}
// Exclusions: prune excluded trees entirely, and skip excluded files
for _, exclusion := range []string{toolDir, binDir, releaseDir, coverageDir} {
if strings.HasPrefix(path, exclusion) {
if info.IsDir() {
return filepath.SkipDir
}
return nil
}
}
if strings.Contains(path, "/vendor/") || strings.Contains(path, ".git") {
if info.IsDir() {
return filepath.SkipDir
}
return nil
}
// Only files beyond this point
if info.IsDir() {
return nil
}
if !strings.HasSuffix(path, ".go") {
return nil
}
*results = append(*results, path)
return nil
})
return *results
}()
goDirs = func() []string {
resultMap := make(map[string]struct{})
for _, path := range goSrc {
absDir, err := filepath.Abs(filepath.Dir(path))
if err != nil {
panic(err)
}
resultMap[absDir] = struct{}{}
}
results := []string{}
for k := range resultMap {
results = append(results, k)
}
return results
}()
goPkgs = func() []string {
results := []string{}
out, err := sh.Output("go", "list", "./...")
if err != nil {
panic(err)
}
for _, line := range strings.Split(out, "\n") {
if !strings.Contains(line, "/vendor/") {
results = append(results, line)
}
}
return results
}()
goCmds = func() []string {
results := []string{}
finfos, err := ioutil.ReadDir(cmdDir)
if err != nil {
panic(err)
}
for _, finfo := range finfos {
results = append(results, finfo.Name())
}
return results
}()
// Ensure output dirs exist
for _, dir := range outputDirs {
os.MkdirAll(dir, os.FileMode(0777))
}
}
func mustStr(r string, err error) string {
if err != nil {
panic(err)
}
return r
}
func getCoreTools() []string {
staticTools := []string{
"github.com/kardianos/govendor",
"github.com/wadey/gocovmerge",
"github.com/mattn/goveralls",
"github.com/tmthrgd/go-bindata/go-bindata",
"github.com/GoASTScanner/gas/cmd/gas", // workaround for Ast scanner
"github.com/alecthomas/gometalinter",
}
return staticTools
}
func getMetalinters() []string {
// Scrape the list of bundled linters from gometalinter's help output; gometalinter itself must already be on the PATH.
dynamicTools := []string{}
goMetalinterHelp, _ := sh.Output("gometalinter", "--help")
linterRx := regexp.MustCompile(`\s+\w+:\s*\((.+)\)`)
for _, l := range strings.Split(goMetalinterHelp, "\n") {
linter := linterRx.FindStringSubmatch(l)
if len(linter) > 1 {
dynamicTools = append(dynamicTools, linter[1])
}
}
return dynamicTools
}
func ensureVendorSrcLink() error {
Log("Symlink vendor to tools dir")
if err := sh.Rm(toolsSrcDir); err != nil {
return err
}
if err := os.Symlink(toolsVendorDir, toolsSrcDir); err != nil {
return err
}
return nil
}
// concurrencyLimitedBuild executes the given build commands, running at most "concurrency" of them at a time
func concurrencyLimitedBuild(buildCmds ...interface{}) error {
resultsCh := make(chan error, len(buildCmds))
concurrencyControl := make(chan struct{}, concurrency)
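// concurrencyControl is a counting semaphore: each worker acquires a slot
// before running its build command and releases it when done, so at most
// "concurrency" builds are in flight at any time.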
for _, buildCmd := range buildCmds {
go func(buildCmd interface{}) {
concurrencyControl <- struct{}{}
resultsCh <- buildCmd.(func() error)()
<-concurrencyControl
}(buildCmd)
}
// Doesn't work at the moment
// mg.Deps(buildCmds...)
results := []error{}
var resultErr error = nil
for len(results) < len(buildCmds) {
err := <-resultsCh
results = append(results, err)
if err != nil {
fmt.Println(err)
resultErr = errors.New("parallel build failed")
}
fmt.Printf("Finished %v of %v\n", len(results), len(buildCmds))
}
return resultErr
}
// Tools builds the project's build tools and is depended on by all other build targets.
func Tools() (err error) {
// Catch panics and convert to errors
defer func() {
if perr := recover(); perr != nil {
err = perr.(error)
}
}()
if err := ensureVendorSrcLink(); err != nil {
return err
}
toolBuild := func(toolType string, tools ...string) error {
toolTargets := []interface{}{}
for _, toolImport := range tools {
toolParts := strings.Split(toolImport, "/")
toolBin := path.Join(toolsBinDir, toolParts[len(toolParts)-1])
Log("Check for changes:", toolBin, toolsVendorDir)
changed, terr := target.Dir(toolBin, toolsVendorDir)
if terr != nil {
if !os.IsNotExist(terr) {
panic(terr)
}
changed = true
}
if changed {
localToolImport := toolImport
f := func() error { return sh.RunWith(toolsEnv, "go", "install", "-v", localToolImport) }
toolTargets = append(toolTargets, f)
}
}
Log("Build", toolType, "tools")
if berr := concurrencyLimitedBuild(toolTargets...); berr != nil {
return berr
}
return nil
}
if berr := toolBuild("static", getCoreTools()...); berr != nil {
return berr
}
if berr := toolBuild("static", getMetalinters()...); berr != nil {
return berr
}
return nil
}
// UpdateTools automatically updates tool dependencies to the latest version.
func UpdateTools() error {
if err := ensureVendorSrcLink(); err != nil {
return err
}
// Ensure govendor is up to date without doing anything
govendorPkg := "github.com/kardianos/govendor"
govendorParts := strings.Split(govendorPkg, "/")
govendorBin := path.Join(toolsBinDir, govendorParts[len(govendorParts)-1])
sh.RunWith(toolsEnv, "go", "get", "-v", "-u", govendorPkg)
if changed, cerr := target.Dir(govendorBin, toolsSrcDir); changed || os.IsNotExist(cerr) {
if err := sh.RunWith(toolsEnv, "go", "install", "-v", govendorPkg); err != nil {
return err
}
} else if cerr != nil {
panic(cerr)
}
// Set current directory so govendor has the right path
previousPwd, wderr := os.Getwd()
if wderr != nil {
return wderr
}
if err := os.Chdir(toolDir); err != nil {
return err
}
// govendor fetch core tools
for _, toolImport := range append(getCoreTools(), getMetalinters()...) {
sh.RunV("govendor", "fetch", "-v", toolImport)
}
// change back to original working directory
if err := os.Chdir(previousPwd); err != nil {
return err
}
return nil
}
// Assets builds binary assets to be bundled into the binary.
func Assets() error {
mg.Deps(Tools)
if err := os.MkdirAll("assets/generated", os.FileMode(0777)); err != nil {
return err
}
return sh.RunV("go-bindata", "-pkg=assets", "-o", "assets/bindata.go", "-ignore=bindata.go",
"-ignore=.*.map$", "-prefix=assets/generated", "assets/generated/...")
}
// Lint runs gometalinter for code quality. CI will run this before accepting PRs.
func Lint() error {
mg.Deps(Tools)
args := []string{"-j", fmt.Sprintf("%v", concurrency), fmt.Sprintf("--deadline=%s",
linterDeadline.String()), "--enable-all", "--line-length=120",
"--disable=gocyclo", "--disable=testify", "--disable=test", "--disable=lll", "--exclude=assets/bindata.go"}
return sh.RunV("gometalinter", append(args, goDirs...)...)
}
// Style checks the formatting of source files. CI will run this before accepting PRs.
func Style() error {
mg.Deps(Tools)
args := []string{"--disable-all", "--enable=gofmt", "--enable=goimports"}
return sh.RunV("gometalinter", append(args, goSrc...)...)
}
// Fmt automatically formats all source code files
func Fmt() error {
mg.Deps(Tools)
fmtErr := sh.RunV("gofmt", append([]string{"-s", "-w"}, goSrc...)...)
if fmtErr != nil {
return fmtErr
}
impErr := sh.RunV("goimports", append([]string{"-w"}, goSrc...)...)
if impErr != nil {
return impErr
}
return nil
}
func listCoverageFiles() ([]string, error) {
result := []string{}
finfos, derr := ioutil.ReadDir(coverageDir)
if derr != nil {
return result, derr
}
for _, finfo := range finfos {
result = append(result, path.Join(coverageDir, finfo.Name()))
}
return result, nil
}
// Test runs the test suite
func Test() error {
mg.Deps(Tools)
// Ensure coverage directory exists
if err := os.MkdirAll(coverageDir, os.FileMode(0777)); err != nil {
return err
}
// Clean up coverage directory
coverFiles, derr := listCoverageFiles()
if derr != nil {
return derr
}
for _, coverFile := range coverFiles {
if err := sh.Rm(coverFile); err != nil {
return err
}
}
// Run tests
for _, pkg := range goPkgs {
coverProfile := path.Join(coverageDir, fmt.Sprintf("%s%s", strings.Replace(pkg, "/", "-", -1), ".out"))
testErr := sh.Run("go", "test", "-v", "-covermode", "count", fmt.Sprintf("-coverprofile=%s", coverProfile),
pkg)
if testErr != nil {
return testErr
}
}
return nil
}
// IntegrationTestBinary builds the integration test binary
func IntegrationTestBinary() error {
changed, err := target.Path("postgres_exporter_integration_test", goSrc...)
if (changed && (err == nil)) || os.IsNotExist(err) {
return sh.RunWith(map[string]string{"CGO_ENABLED": "0"}, "go", "test", "./cmd/postgres_exporter",
"-c", "-tags", "integration",
"-a", "-ldflags", "-extldflags '-static'",
"-X", fmt.Sprintf("main.Branch=%s", branch),
"-X", fmt.Sprintf("main.BuildDate=%s", buildDate),
"-X", fmt.Sprintf("main.Revision=%s", revision),
"-X", fmt.Sprintf("main.VersionShort=%s", versionShort),
"-o", "postgres_exporter_integration_test", "-cover", "-covermode", "count")
}
return err
}
// TestIntegration runs integration tests
func TestIntegration() error {
mg.Deps(Binary, IntegrationTestBinary)
exporterPath := mustStr(filepath.Abs("postgres_exporter"))
testBinaryPath := mustStr(filepath.Abs("postgres_exporter_integration_test"))
testScriptPath := mustStr(filepath.Abs("postgres_exporter_integration_test_script"))
integrationCoverageProfile := path.Join(coverageDir, "cover.integration.out")
return sh.RunV("cmd/postgres_exporter/tests/test-smoke", exporterPath,
fmt.Sprintf("%s %s %s", testScriptPath, testBinaryPath, integrationCoverageProfile))
}
// Coverage merges the coverage profiles in .coverage into a single report. It does not clean the directory before or after running.
func Coverage() error {
// Collect the coverage profiles
coverFiles, derr := listCoverageFiles()
if derr != nil {
return derr
}
mergedCoverage, err := sh.Output("gocovmerge", coverFiles...)
if err != nil {
return err
}
return ioutil.WriteFile(constCoverFile, []byte(mergedCoverage), os.FileMode(0777))
}
// All runs a full suite suitable for CI
func All() error {
mg.SerialDeps(Style, Lint, Test, TestIntegration, Coverage, Release)
return nil
}
// Release builds release archives under the release/ directory
func Release() error {
mg.Deps(ReleaseBin)
for _, platform := range platforms {
owd, wderr := os.Getwd()
if wderr != nil {
return wderr
}
if err := os.Chdir(binDir); err != nil {
return err
}
if platform.OS == "windows" {
// build a zip binary as well
err := archiver.Zip.Make(fmt.Sprintf("%s.zip", platform.ReleaseBase()), []string{platform.ArchiveDir()})
if err != nil {
return err
}
}
// build tar gz
err := archiver.TarGz.Make(fmt.Sprintf("%s.tar.gz", platform.ReleaseBase()), []string{platform.ArchiveDir()})
if err != nil {
return err
}
os.Chdir(owd)
}
return nil
}
func makeBuilder(cmd string, platform Platform) func() error {
f := func() error {
// Depend on assets
mg.Deps(Assets)
cmdSrc := fmt.Sprintf("./%s/%s", mustStr(filepath.Rel(curDir, cmdDir)), cmd)
Log("Make platform binary directory:", platform.PlatformDir())
if err := os.MkdirAll(platform.PlatformDir(), os.FileMode(0777)); err != nil {
return err
}
Log("Checking for changes:", platform.PlatformBin(cmd))
if changed, err := target.Path(platform.PlatformBin(cmd), goSrc...); !changed {
if err != nil {
if !os.IsNotExist(err) {
return err
}
} else {
return nil
}
}
fmt.Println("Building", platform.PlatformBin(cmd))
return sh.RunWith(map[string]string{"CGO_ENABLED": "0", "GOOS": platform.OS, "GOARCH": platform.Arch},
"go", "build", "-a", "-ldflags", fmt.Sprintf("-extldflags '-static' -X main.Version=%s", version),
"-o", platform.PlatformBin(cmd), cmdSrc)
}
return f
}
func getCurrentPlatform() *Platform {
var curPlatform *Platform
for _, p := range platforms {
if p.OS == runtime.GOOS && p.Arch == runtime.GOARCH {
storedP := p
curPlatform = &storedP
}
}
Log("Determined current platform:", curPlatform)
return curPlatform
}
// Binary builds a binary for the current platform
func Binary() error {
curPlatform := getCurrentPlatform()
if curPlatform == nil {
return errors.New("current platform is not supported")
}
for _, cmd := range goCmds {
err := makeBuilder(cmd, *curPlatform)()
if err != nil {
return err
}
// Make a root symlink to the build
cmdPath := path.Join(curDir, cmd)
os.Remove(cmdPath)
if err := os.Symlink(curPlatform.PlatformBin(cmd), cmdPath); err != nil {
return err
}
}
return nil
}
// ReleaseBin builds cross-platform release binaries under the bin/ directory
func ReleaseBin() error {
buildCmds := []interface{}{}
for _, cmd := range goCmds {
for _, platform := range platforms {
buildCmds = append(buildCmds, makeBuilder(cmd, platform))
}
}
return concurrencyLimitedBuild(buildCmds...)
}
// Docker builds the docker image
func Docker() error {
mg.Deps(Binary)
p := getCurrentPlatform()
if p == nil {
return errors.New("current platform is not supported")
}
return sh.RunV("docker", "build",
fmt.Sprintf("--build-arg=binary=%s",
mustStr(filepath.Rel(curDir, p.PlatformBin("postgres_exporter")))),
"-t", containerName, ".")
}
// Clean deletes build output and cleans up the working directory
func Clean() error {
for _, name := range goCmds {
if err := sh.Rm(path.Join(binDir, name)); err != nil {
return err
}
}
for _, name := range outputDirs {
if err := sh.Rm(name); err != nil {
return err
}
}
return nil
}
// Debug prints the value of internal state variables
func Debug() error {
fmt.Println("Source Files:", goSrc)
fmt.Println("Packages:", goPkgs)
fmt.Println("Directories:", goDirs)
fmt.Println("Command Paths:", goCmds)
fmt.Println("Output Dirs:", outputDirs)
fmt.Println("Tool Src Dir:", toolsSrcDir)
fmt.Println("Tool Vendor Dir:", toolsVendorDir)
fmt.Println("Tool GOPATH:", toolsGoPath)
fmt.Println("PATH:", os.Getenv("PATH"))
return nil
}
// Autogen configures the local git repository with commit hooks
func Autogen() error {
fmt.Println("Installing git hooks in local repository...")
return os.Link(path.Join(toolDir, "pre-commit"), ".git/hooks/pre-commit")
}

40
postgres-metrics-get-changes.sh Executable file
View File

@ -0,0 +1,40 @@
#!/bin/bash
# Script to parse a text exposition format file into a unique list of metrics
# output by the exporter and then build lists of added/removed metrics.
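#
# Usage sketch (the argument layout is inferred from this script, and the
# example path is hypothetical):
#   ./postgres-metrics-get-changes.sh path/to/previous/metriclists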
old_src="$1"
if [ ! -d "$old_src" ] ; then
mkdir -p "$old_src"
fi
function generate_add_removed() {
type="$1"
pg_version="$2"
old_version="$3"
new_version="$4"
if [ ! -e "$old_version" ] ; then
touch "$old_version"
fi
comm -23 "$old_version" "$new_version" > ".metrics.${type}.${pg_version}.removed"
comm -13 "$old_version" "$new_version" > ".metrics.${type}.${pg_version}.added"
}
for raw_prom in $(echo .*.prom) ; do
# Get the type and version
type=$(echo "$raw_prom" | cut -d'.' -f3)
pg_version=$(echo "$raw_prom" | cut -d'.' -f4- | sed 's/\.prom$//g')
unique_file="${raw_prom}.unique"
old_unique_file="$old_src/$unique_file"
# Strip comments, drop sample values and labels, then sort and deduplicate the metric names
grep -v '#' "$raw_prom" | \
rev | cut -d' ' -f2- | \
rev | cut -d'{' -f1 | \
sort | \
uniq > "$unique_file"
generate_add_removed "$type" "$pg_version" "$old_unique_file" "$unique_file"
done

89
postgres_exporter.rc Normal file
View File

@ -0,0 +1,89 @@
#!/bin/sh
# PROVIDE: postgres_exporter
# REQUIRE: LOGIN
# KEYWORD: shutdown
#
# rc-script for postgres_exporter
#
#
# Add the following lines to /etc/rc.conf.local or /etc/rc.conf
# to enable this service:
#
# postgres_exporter_enable (bool): Set to NO by default.
# Set it to YES to enable postgres_exporter.
# postgres_exporter_user (string): Set user that postgres_exporter will run under
# Default is "nobody".
# postgres_exporter_group (string): Set group that postgres_exporter will run under
# Default is "nobody".
# postgres_exporter_args (string): Set extra arguments to pass to postgres_exporter
# Default is "".
# postgres_exporter_listen_address (string): Set ip:port to listen on for web interface and telemetry.
# Defaults to ":9187"
# postgres_exporter_pg_user (string): Set the Postgres database user
# Defaults to "postgres_exporter"
# postgres_exporter_pg_pass (string): Set the Postgres database password
# Default is empty
# postgres_exporter_pg_host (string): Set the Postgres database server
# Defaults to "localhost"
# postgres_exporter_pg_port (string): Set the Postgres database port
# Defaults to "5432"
# Add extra arguments via "postgres_exporter_args", which can be chosen from:
# (see $ postgres_exporter --help)
#
# -dumpmaps
# Do not run, simply dump the maps.
# -extend.query-path string
# Path to custom queries to run.
# -log.level value
# Only log messages with the given severity or above. Valid levels: [debug, info, warn, error, fatal].
# -version
# print version and exit
# -web.telemetry-path string
# Path under which to expose metrics. (default "/metrics")
# -log.format value
# If set use a syslog logger or JSON logging. Example: logger:syslog?appname=bob&local=7 or logger:stdout?json=true. Defaults to stderr.
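#
# Example /etc/rc.conf.local entries (values are illustrative only; the
# query file path is hypothetical):
#
# postgres_exporter_enable="YES"
# postgres_exporter_pg_pass="example-password"
# postgres_exporter_args="-extend.query-path=/usr/local/etc/queries.yaml"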
. /etc/rc.subr
name=postgres_exporter
rcvar=postgres_exporter_enable
load_rc_config $name
: ${postgres_exporter_enable:="NO"}
: ${postgres_exporter_user:="nobody"}
: ${postgres_exporter_group:="nobody"}
: ${postgres_exporter_args:=""}
: ${postgres_exporter_listen_address:=":9187"}
: ${postgres_exporter_pg_user:="postgres_exporter"}
: ${postgres_exporter_pg_pass:=""}
: ${postgres_exporter_pg_host:="localhost"}
: ${postgres_exporter_pg_port:="5432"}
postgres_exporter_data_source_name="postgresql://${postgres_exporter_pg_user}:${postgres_exporter_pg_pass}@${postgres_exporter_pg_host}:${postgres_exporter_pg_port}/postgres?sslmode=disable"
pidfile=/var/run/postgres_exporter.pid
command="/usr/sbin/daemon"
procname="/usr/local/bin/postgres_exporter"
command_args="-p ${pidfile} /usr/bin/env DATA_SOURCE_NAME="${postgres_exporter_data_source_name}" ${procname} \
-web.listen-address=${postgres_exporter_listen_address} \
${postgres_exporter_args}"
start_precmd=postgres_exporter_startprecmd
postgres_exporter_startprecmd()
{
if [ ! -e ${pidfile} ]; then
install -o ${postgres_exporter_user} -g ${postgres_exporter_group} /dev/null ${pidfile};
fi
}
load_rc_config $name
run_rc_command "$1"

18
postgres_exporter_integration_test_script Executable file
View File

@ -0,0 +1,18 @@
#!/bin/bash
# This script wraps the integration test binary so it produces concatenated
# test output.
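#
# Usage sketch (inferred from the argument handling below; any remaining
# arguments are forwarded to the test binary):
#   ./postgres_exporter_integration_test_script <test-binary> <coverage-out> [test args...]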
test_binary=$1
shift
output_cov=$1
shift
echo "Test Binary: $test_binary" 1>&2
echo "Coverage File: $output_cov" 1>&2
echo "mode: count" > $output_cov
test_cov=$(mktemp)
$test_binary -test.coverprofile=$test_cov $@ || exit 1
tail -n +2 $test_cov >> $output_cov
rm -f $test_cov

205
queries.yaml Normal file
View File

@ -0,0 +1,205 @@
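# A sketch of each entry's format (field semantics summarized from the
# exporter's custom-query support; consult its README for the full list of
# usage types):
#
#   <namespace>:
#     query: SQL whose result columns are mapped via "metrics"
#     master: (optional) run the query only against the primary server
#     cache_seconds: (optional) how long the result may be cached
#     metrics:
#       - <column>:
#           usage: "LABEL" | "COUNTER" | "GAUGE" | ...
#           description: help text for the exported metric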
pg_replication:
query: "SELECT EXTRACT(EPOCH FROM (now() - pg_last_xact_replay_timestamp())) as lag"
master: true
metrics:
- lag:
usage: "GAUGE"
description: "Replication lag behind master in seconds"
pg_postmaster:
query: "SELECT pg_postmaster_start_time as start_time_seconds from pg_postmaster_start_time()"
master: true
metrics:
- start_time_seconds:
usage: "GAUGE"
description: "Time at which postmaster started"
pg_stat_user_tables:
query: "SELECT current_database() datname, schemaname, relname, seq_scan, seq_tup_read, idx_scan, idx_tup_fetch, n_tup_ins, n_tup_upd, n_tup_del, n_tup_hot_upd, n_live_tup, n_dead_tup, n_mod_since_analyze, COALESCE(last_vacuum, '1970-01-01Z'), COALESCE(last_vacuum, '1970-01-01Z') as last_vacuum, COALESCE(last_autovacuum, '1970-01-01Z') as last_autovacuum, COALESCE(last_analyze, '1970-01-01Z') as last_analyze, COALESCE(last_autoanalyze, '1970-01-01Z') as last_autoanalyze, vacuum_count, autovacuum_count, analyze_count, autoanalyze_count FROM pg_stat_user_tables"
metrics:
- datname:
usage: "LABEL"
description: "Name of current database"
- schemaname:
usage: "LABEL"
description: "Name of the schema that this table is in"
- relname:
usage: "LABEL"
description: "Name of this table"
- seq_scan:
usage: "COUNTER"
description: "Number of sequential scans initiated on this table"
- seq_tup_read:
usage: "COUNTER"
description: "Number of live rows fetched by sequential scans"
- idx_scan:
usage: "COUNTER"
description: "Number of index scans initiated on this table"
- idx_tup_fetch:
usage: "COUNTER"
description: "Number of live rows fetched by index scans"
- n_tup_ins:
usage: "COUNTER"
description: "Number of rows inserted"
- n_tup_upd:
usage: "COUNTER"
description: "Number of rows updated"
- n_tup_del:
usage: "COUNTER"
description: "Number of rows deleted"
- n_tup_hot_upd:
usage: "COUNTER"
description: "Number of rows HOT updated (i.e., with no separate index update required)"
- n_live_tup:
usage: "GAUGE"
description: "Estimated number of live rows"
- n_dead_tup:
usage: "GAUGE"
description: "Estimated number of dead rows"
- n_mod_since_analyze:
usage: "GAUGE"
description: "Estimated number of rows changed since last analyze"
- last_vacuum:
usage: "GAUGE"
description: "Last time at which this table was manually vacuumed (not counting VACUUM FULL)"
- last_autovacuum:
usage: "GAUGE"
description: "Last time at which this table was vacuumed by the autovacuum daemon"
- last_analyze:
usage: "GAUGE"
description: "Last time at which this table was manually analyzed"
- last_autoanalyze:
usage: "GAUGE"
description: "Last time at which this table was analyzed by the autovacuum daemon"
- vacuum_count:
usage: "COUNTER"
description: "Number of times this table has been manually vacuumed (not counting VACUUM FULL)"
- autovacuum_count:
usage: "COUNTER"
description: "Number of times this table has been vacuumed by the autovacuum daemon"
- analyze_count:
usage: "COUNTER"
description: "Number of times this table has been manually analyzed"
- autoanalyze_count:
usage: "COUNTER"
description: "Number of times this table has been analyzed by the autovacuum daemon"
pg_statio_user_tables:
query: "SELECT current_database() datname, schemaname, relname, heap_blks_read, heap_blks_hit, idx_blks_read, idx_blks_hit, toast_blks_read, toast_blks_hit, tidx_blks_read, tidx_blks_hit FROM pg_statio_user_tables"
metrics:
- datname:
usage: "LABEL"
description: "Name of current database"
- schemaname:
usage: "LABEL"
description: "Name of the schema that this table is in"
- relname:
usage: "LABEL"
description: "Name of this table"
- heap_blks_read:
usage: "COUNTER"
description: "Number of disk blocks read from this table"
- heap_blks_hit:
usage: "COUNTER"
description: "Number of buffer hits in this table"
- idx_blks_read:
usage: "COUNTER"
description: "Number of disk blocks read from all indexes on this table"
- idx_blks_hit:
usage: "COUNTER"
description: "Number of buffer hits in all indexes on this table"
- toast_blks_read:
usage: "COUNTER"
description: "Number of disk blocks read from this table's TOAST table (if any)"
- toast_blks_hit:
usage: "COUNTER"
description: "Number of buffer hits in this table's TOAST table (if any)"
- tidx_blks_read:
usage: "COUNTER"
description: "Number of disk blocks read from this table's TOAST table indexes (if any)"
- tidx_blks_hit:
usage: "COUNTER"
description: "Number of buffer hits in this table's TOAST table indexes (if any)"
pg_database:
query: "SELECT pg_database.datname, pg_database_size(pg_database.datname) as size FROM pg_database"
master: true
cache_seconds: 30
metrics:
- datname:
usage: "LABEL"
description: "Name of the database"
- size_bytes:
usage: "GAUGE"
description: "Disk space used by the database"
pg_stat_statements:
query: "SELECT t2.rolname, t3.datname, queryid, calls, total_time / 1000 as total_time_seconds, min_time / 1000 as min_time_seconds, max_time / 1000 as max_time_seconds, mean_time / 1000 as mean_time_seconds, stddev_time / 1000 as stddev_time_seconds, rows, shared_blks_hit, shared_blks_read, shared_blks_dirtied, shared_blks_written, local_blks_hit, local_blks_read, local_blks_dirtied, local_blks_written, temp_blks_read, temp_blks_written, blk_read_time / 1000 as blk_read_time_seconds, blk_write_time / 1000 as blk_write_time_seconds FROM pg_stat_statements t1 join pg_roles t2 on (t1.userid=t2.oid) join pg_database t3 on (t1.dbid=t3.oid)"
master: true
metrics:
- rolname:
usage: "LABEL"
description: "Name of user"
- datname:
usage: "LABEL"
description: "Name of database"
- queryid:
usage: "LABEL"
description: "Query ID"
- calls:
usage: "COUNTER"
description: "Number of times executed"
- total_time_seconds:
usage: "COUNTER"
description: "Total time spent in the statement, in milliseconds"
- min_time_seconds:
usage: "GAUGE"
description: "Minimum time spent in the statement, in milliseconds"
- max_time_seconds:
usage: "GAUGE"
description: "Maximum time spent in the statement, in milliseconds"
- mean_time_seconds:
usage: "GAUGE"
description: "Mean time spent in the statement, in milliseconds"
- stddev_time_seconds:
usage: "GAUGE"
description: "Population standard deviation of time spent in the statement, in milliseconds"
- rows:
usage: "COUNTER"
description: "Total number of rows retrieved or affected by the statement"
- shared_blks_hit:
usage: "COUNTER"
description: "Total number of shared block cache hits by the statement"
- shared_blks_read:
usage: "COUNTER"
description: "Total number of shared blocks read by the statement"
- shared_blks_dirtied:
usage: "COUNTER"
description: "Total number of shared blocks dirtied by the statement"
- shared_blks_written:
usage: "COUNTER"
description: "Total number of shared blocks written by the statement"
- local_blks_hit:
usage: "COUNTER"
description: "Total number of local block cache hits by the statement"
- local_blks_read:
usage: "COUNTER"
description: "Total number of local blocks read by the statement"
- local_blks_dirtied:
usage: "COUNTER"
description: "Total number of local blocks dirtied by the statement"
- local_blks_written:
usage: "COUNTER"
description: "Total number of local blocks written by the statement"
- temp_blks_read:
usage: "COUNTER"
description: "Total number of temp blocks read by the statement"
- temp_blks_written:
usage: "COUNTER"
description: "Total number of temp blocks written by the statement"
- blk_read_time_seconds:
usage: "COUNTER"
description: "Total time the statement spent reading blocks, in milliseconds (if track_io_timing is enabled, otherwise zero)"
- blk_write_time_seconds:
usage: "COUNTER"
description: "Total time the statement spent writing blocks, in milliseconds (if track_io_timing is enabled, otherwise zero)"

4
tools/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
/pkg
/bin
/tools.deps
/metatools.deps

9
tools/README.md Normal file
View File

@ -0,0 +1,9 @@
Vendored versions of the build tooling.
gocovmerge is used to merge coverage reports for uploading to a service like
coveralls, and gometalinter conveniently incorporates multiple Go linters.
By vendoring both, we gain a self-contained build system.
Run `make all` to build, and `make update` to update.
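For example (targets as documented above):

    make all     # build the vendored tools
    make update  # update them to their latest upstream versions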

803
tools/vendor/vendor.json vendored Normal file
View File

@ -0,0 +1,803 @@
{
"comment": "",
"ignore": "test",
"package": [
{
"checksumSHA1": "4Tc07iR3HloUYC4HNT4xc0875WY=",
"path": "github.com/Bowery/prompt",
"revision": "0f1139e9a1c74b57ccce6bdb3cd2f7cd04dd3449",
"revisionTime": "2017-02-19T07:16:37Z"
},
{
"checksumSHA1": "LnZqwaKHuOH0bcpDUrqrcGvER/o=",
"path": "github.com/GoASTScanner/gas",
"revision": "1d9f816ca5d8224320a72b8aefbdb6f5d3692da6",
"revisionTime": "2018-03-05T12:20:24Z"
},
{
"checksumSHA1": "Z03LJp4+mkPL1JTZzzizqWdtZSs=",
"path": "github.com/GoASTScanner/gas/cmd/gas",
"revision": "1d9f816ca5d8224320a72b8aefbdb6f5d3692da6",
"revisionTime": "2018-03-05T12:20:24Z"
},
{
"checksumSHA1": "sK1dOo48F424xLCvE+ic8tRk7i8=",
"path": "github.com/GoASTScanner/gas/core",
"revision": "1beec25f7754273c9672a3368ea7048d4e73138e",
"revisionTime": "2017-04-11T19:38:53Z"
},
{
"checksumSHA1": "ZSTQB9oOviIo0K+41PBciAFINHU=",
"path": "github.com/GoASTScanner/gas/output",
"revision": "1d9f816ca5d8224320a72b8aefbdb6f5d3692da6",
"revisionTime": "2018-03-05T12:20:24Z"
},
{
"checksumSHA1": "VQoUd/3JzI8CQdaLrAVN723MGZM=",
"path": "github.com/GoASTScanner/gas/rules",
"revision": "1d9f816ca5d8224320a72b8aefbdb6f5d3692da6",
"revisionTime": "2018-03-05T12:20:24Z"
},
{
"checksumSHA1": "cItvKwnl+gkO2j0Q2964efC+vTw=",
"path": "github.com/alecthomas/gocyclo",
"revision": "aa8f8b160214d8dfccfe3e17e578dd0fcc6fede7",
"revisionTime": "2015-02-08T22:17:26Z"
},
{
"checksumSHA1": "xEwn4Ufny5rQEJ8f9hao7gkd86g=",
"path": "github.com/alecthomas/gometalinter",
"revision": "39a4757a714702004d3fdca45ff83af4dc484af9",
"revisionTime": "2018-02-23T20:08:23Z"
},
{
"checksumSHA1": "fCc3grA7vIxfBru7R3SqjcW+oLI=",
"path": "github.com/alecthomas/units",
"revision": "2efee857e7cfd4f3d0138cc3cbb1b4966962b93a",
"revisionTime": "2015-10-22T06:55:26Z"
},
{
"checksumSHA1": "kjygOPbr5jsND2nU4NJnyyDRVF8=",
"path": "github.com/alexflint/go-arg",
"revision": "cef6506c97e5731da728c374ff3523e481026423",
"revisionTime": "2017-03-30T21:10:29Z"
},
{
"checksumSHA1": "M5vBatiAKUjyLYrb9nS+6QbpEjE=",
"path": "github.com/alexflint/go-scalar",
"revision": "e80c3b7ed292b052c7083b6fd7154a8422c33f65",
"revisionTime": "2017-02-16T02:04:25Z"
},
{
"checksumSHA1": "rDM1YOCSZE4BLxZoBJV56/VmZSo=",
"path": "github.com/alexkohler/nakedret",
"revision": "c0e305a4f690fed163d47628bcc06a6d5655bf92",
"revisionTime": "2017-11-06T22:32:15Z"
},
{
"checksumSHA1": "z6mKUmWeXRT0k+xrXxA5CLKOWiE=",
"path": "github.com/client9/misspell",
"revision": "1d9ab7749ee27131547244ff2f9953d235b591fb",
"revisionTime": "2017-05-30T22:15:07Z"
},
{
"checksumSHA1": "3Lbx+qNi8brwKa9dU41O6SEOW6c=",
"path": "github.com/client9/misspell/cmd/misspell",
"revision": "9ce5d979ffdaca6385988d7ad1079a33ec942d20",
"revisionTime": "2017-09-28T00:02:06Z"
},
{
"checksumSHA1": "ULnk7ggN82JFO0ZdBCmSsQH3Vh8=",
"path": "github.com/dchest/safefile",
"revision": "855e8d98f1852d48dde521e0522408d1fe7e836a",
"revisionTime": "2015-10-22T10:31:44Z"
},
{
"checksumSHA1": "aLXmB5i+PFfM/dVqfHxoB3a8v/g=",
"path": "github.com/dnephin/govet",
"revision": "4a96d43e39d340b63daa8bc5576985aa599885f6",
"revisionTime": "2017-10-12T18:51:37Z",
"version": "fork",
"versionExact": "fork"
},
{
"checksumSHA1": "Ba6cj2wCpDZcjE0kZ4Q32PJW4fg=",
"path": "github.com/dnephin/govet/internal/cfg",
"revision": "4a96d43e39d340b63daa8bc5576985aa599885f6",
"revisionTime": "2017-10-12T18:51:37Z",
"version": "fork",
"versionExact": "fork"
},
{
"checksumSHA1": "ttQiZmni3k7Tbfi4/CX3JG6NjAw=",
"path": "github.com/dnephin/govet/internal/whitelist",
"revision": "4a96d43e39d340b63daa8bc5576985aa599885f6",
"revisionTime": "2017-10-12T18:51:37Z",
"version": "fork",
"versionExact": "fork"
},
{
"checksumSHA1": "+U50xksZS8g53vruOP7px7cLilE=",
"path": "github.com/golang/lint",
"revision": "c5fb716d6688a859aae56d26d3e6070808df29f7",
"revisionTime": "2017-06-02T23:41:31Z"
},
{
"checksumSHA1": "SsCcmchQUYbzaKLaJ0zfrd9DdrI=",
"path": "github.com/golang/lint/golint",
"revision": "fb4f8c1d3a179654f93ef7e91d68fc7b1de6e88f",
"revisionTime": "2018-03-01T17:26:52Z"
},
{
"checksumSHA1": "e/Kc2UOy1lKAy31xWlK37M1r2e8=",
"path": "github.com/google/shlex",
"revision": "6f45313302b9c56850fc17f99e40caebce98c716",
"revisionTime": "2015-01-27T13:39:51Z"
},
{
"checksumSHA1": "TKaX+8YCZQwYv7Kiy2o/E86V0aE=",
"path": "github.com/gordonklaus/ineffassign",
"revision": "7bae11eba15a3285c75e388f77eb6357a2d73ee2",
"revisionTime": "2017-11-18T19:06:32Z"
},
{
"checksumSHA1": "DbSCKltce7IrgpDUF8+C7J+z+GU=",
"path": "github.com/jgautheron/goconst",
"revision": "6a7633b712b6fb1d6821d33851d086a1d545dacd",
"revisionTime": "2016-05-14T19:25:19Z"
},
{
"checksumSHA1": "0tPXJ5Wul0FXiUDwVWsd/RA3tWg=",
"path": "github.com/jgautheron/goconst/cmd/goconst",
"revision": "9740945f5dcb78c2faa8eedcce78c2a04aa6e1e9",
"revisionTime": "2017-07-03T17:01:52Z"
},
{
"checksumSHA1": "NKvKUGq0lp/GjLS7Ffp7BAjcoTg=",
"path": "github.com/kardianos/govendor",
"revision": "c5ee5dc32350319e3423e570818eaa818601b789",
"revisionTime": "2018-02-09T21:39:04Z"
},
{
"checksumSHA1": "m24kWw3bFoAkKVvTjmxSLsywdHY=",
"path": "github.com/kardianos/govendor/cliprompt",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "/6r+luJ0EK07RknNd0zrubHtMuQ=",
"path": "github.com/kardianos/govendor/context",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "Y0WMEgLxFAzHAIxFViFSWh7dqqY=",
"path": "github.com/kardianos/govendor/help",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "SPRzsXaOsg9dENhLfHRjmFMmGQM=",
"path": "github.com/kardianos/govendor/internal/pathos",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "Bl6I6yMiK60dzOAfyO7As6MSPIk=",
"path": "github.com/kardianos/govendor/internal/vfilepath",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "2Vg+J79rEhmtnprErQ7fTZdneIk=",
"path": "github.com/kardianos/govendor/internal/vos",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "rpK9ccIJkLV4IbKb3lUjUo5DSfU=",
"path": "github.com/kardianos/govendor/migrate",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "wL4SaLS/HTn32Gmq8kpYRr/cn68=",
"path": "github.com/kardianos/govendor/pkgspec",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "I9oQBOtXoOinofTJrBE+zI+vDCs=",
"path": "github.com/kardianos/govendor/prompt",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "tXbK0YAL7/ZrLWkokBrLdp30xjw=",
"path": "github.com/kardianos/govendor/run",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "ZDlz1nWDmErU501lCChKbTT3kEs=",
"path": "github.com/kardianos/govendor/vcs",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "oXa3HaUXhXktMp4C0xTOSrzPDTE=",
"path": "github.com/kardianos/govendor/vendorfile",
"revision": "c86c10d612bf08e847456ce91d495eb69ad87087",
"revisionTime": "2017-05-06T05:20:04Z"
},
{
"checksumSHA1": "QSxPR3g/AtzGMJSGrdHH6bBQnTc=",
"path": "github.com/kisielk/errcheck",
"revision": "8050dd7cc11578becd8622667107bb21a7baf451",
"revisionTime": "2018-03-03T00:00:09Z"
},
{
"checksumSHA1": "GP25rgIPshJh0tpiBg3Z8Dexqj4=",
"path": "github.com/kisielk/errcheck/internal/errcheck",
"revision": "23699b7e2cbfdb89481023524954ba2aeff6be90",
"revisionTime": "2017-03-17T17:34:29Z"
},
{
"checksumSHA1": "9fvV44Csmu+K5BpFvEBs2p8alBI=",
"path": "github.com/kisielk/gotool",
"revision": "0de1eaf82fa3f583ce21fde859f1e7e0c5e9b220",
"revisionTime": "2016-11-30T08:01:11Z"
},
{
"checksumSHA1": "Us06jbfYQlapYdo8mO94mQMy22o=",
"path": "github.com/mattn/goveralls",
"revision": "a419d25dbaefa70d50cfbf5fbd2fc2f047bf95d2",
"revisionTime": "2018-03-01T14:36:12Z"
},
{
"checksumSHA1": "dk0ehYSmMaGLWFQPND3cVgk744I=",
"path": "github.com/mdempsky/maligned",
"revision": "08c8e9db1bce03f1af283686c0943fcb75f0109e",
"revisionTime": "2016-08-25T09:47:39Z"
},
{
"checksumSHA1": "90pFJb64MwgYvN5AmPVaWl87ZyU=",
"path": "github.com/mdempsky/unconvert",
"revision": "beb68d938016d2dec1d1b078054f4d3db25f97be",
"revisionTime": "2016-08-03T23:01:54Z"
},
{
"checksumSHA1": "k3eGAQ+pCIffVpgvoBrPuLK6Yz8=",
"path": "github.com/mibk/dupl",
"revision": "72dc2d83bec70e053e9294378aacb1a032f51a31",
"revisionTime": "2017-11-19T16:48:37Z"
},
{
"checksumSHA1": "sHi3Qhc2/0XMcIXB31NLXkbIoz0=",
"path": "github.com/mibk/dupl/job",
"revision": "3447d9b0cb5a3e7dccb1a9f1c975f35683f304e6",
"revisionTime": "2017-02-27T22:14:17Z"
},
{
"checksumSHA1": "VZ5EU9NrZck4UX3OpC9YK/gB/A4=",
"path": "github.com/mibk/dupl/output",
"revision": "3447d9b0cb5a3e7dccb1a9f1c975f35683f304e6",
"revisionTime": "2017-02-27T22:14:17Z"
},
{
"checksumSHA1": "mGQ3tVbY9uLwfwoeQjvBBBm7yRw=",
"path": "github.com/mibk/dupl/printer",
"revision": "72dc2d83bec70e053e9294378aacb1a032f51a31",
"revisionTime": "2017-11-19T16:48:37Z"
},
{
"checksumSHA1": "o9BJwhna5BuCTYWBGBo9VA+Ez/M=",
"path": "github.com/mibk/dupl/suffixtree",
"revision": "3447d9b0cb5a3e7dccb1a9f1c975f35683f304e6",
"revisionTime": "2017-02-27T22:14:17Z"
},
{
"checksumSHA1": "HveZ42ihDCZQumgGFMQIs8Nendg=",
"path": "github.com/mibk/dupl/syntax",
"revision": "3447d9b0cb5a3e7dccb1a9f1c975f35683f304e6",
"revisionTime": "2017-02-27T22:14:17Z"
},
{
"checksumSHA1": "YuBPssHbL/iU+1poNlfBDl2IqG4=",
"path": "github.com/mibk/dupl/syntax/golang",
"revision": "3447d9b0cb5a3e7dccb1a9f1c975f35683f304e6",
"revisionTime": "2017-02-27T22:14:17Z"
},
{
"checksumSHA1": "GtTRl0HhAPDjp+s02RJnFg3znZ0=",
"path": "github.com/mvdan/interfacer",
"revision": "22c51662ff476dfd97944f74db1b263ed920ee83",
"revisionTime": "2017-04-06T16:05:15Z"
},
{
"checksumSHA1": "zD/VW+BRbOjxk1xq5bmdigi0cp8=",
"path": "github.com/mvdan/interfacer/cmd/interfacer",
"revision": "22c51662ff476dfd97944f74db1b263ed920ee83",
"revisionTime": "2017-04-06T16:05:15Z"
},
{
"checksumSHA1": "18GDIJCo0vo+mmQDIYmyb2JSWqo=",
"path": "github.com/mvdan/lint",
"revision": "c9cbe299b369cbfea16318baaa037b19a69e45d2",
"revisionTime": "2017-04-06T10:09:31Z"
},
{
"checksumSHA1": "5LiZtu67exUdRJ0/QQvU/epG9no=",
"path": "github.com/mvdan/unparam",
"revision": "d647bb803b10a6777ee4c6a176416b91fa14713e",
"revisionTime": "2017-05-30T08:59:07Z"
},
{
"checksumSHA1": "tuOLCrGa9DjfXheKkMXtHtQu3bs=",
"path": "github.com/mvdan/unparam/check",
"revision": "d647bb803b10a6777ee4c6a176416b91fa14713e",
"revisionTime": "2017-05-30T08:59:07Z"
},
{
"checksumSHA1": "DP8R0Q7TDlHbhz9Livyj8RkRKvU=",
"path": "github.com/nbutton23/zxcvbn-go",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "HEqKoRuKJ86gic8DPZqtDKZNo7E=",
"path": "github.com/nbutton23/zxcvbn-go/adjacency",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "etc47rBuvFfzUZ7n8EDLQiDQeXU=",
"path": "github.com/nbutton23/zxcvbn-go/data",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "/NSHii4ih+43IBnoXrjAtIzTtPI=",
"path": "github.com/nbutton23/zxcvbn-go/entropy",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "VyH3r1FJcSB13wj0T812EMASq1Q=",
"path": "github.com/nbutton23/zxcvbn-go/frequency",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "U6O/H84jE24jhCSOgi+IsjSPomM=",
"path": "github.com/nbutton23/zxcvbn-go/match",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "BHO5wnIg2NQTYSILHmpezEJFv4E=",
"path": "github.com/nbutton23/zxcvbn-go/matching",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "vRXGbBJSonwD03A/WAkhNkYNY38=",
"path": "github.com/nbutton23/zxcvbn-go/scoring",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "cEdCjSL9cNJm5o+nGwPM3WgloyM=",
"path": "github.com/nbutton23/zxcvbn-go/utils/math",
"revision": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4",
"revisionTime": "2016-06-27T00:44:24Z"
},
{
"checksumSHA1": "uEc9/1HbYGeK7wPStF6FmUlfzGE=",
"path": "github.com/nicksnyder/go-i18n/i18n",
"revision": "3e70a1a463008cea6726380c908b1a6a8bdf7b24",
"revisionTime": "2017-05-12T15:20:54Z"
},
{
"checksumSHA1": "gDe7nlx3FyCVxLkARgl0VAntDRk=",
"path": "github.com/nicksnyder/go-i18n/i18n/bundle",
"revision": "3e70a1a463008cea6726380c908b1a6a8bdf7b24",
"revisionTime": "2017-05-12T15:20:54Z"
},
{
"checksumSHA1": "+XOg99I1zdmBRUb04ZswvzQ2WS0=",
"path": "github.com/nicksnyder/go-i18n/i18n/language",
"revision": "3e70a1a463008cea6726380c908b1a6a8bdf7b24",
"revisionTime": "2017-05-12T15:20:54Z"
},
{
"checksumSHA1": "WZOU406In2hs8FJOHWqV8PWkJKs=",
"path": "github.com/nicksnyder/go-i18n/i18n/translation",
"revision": "3e70a1a463008cea6726380c908b1a6a8bdf7b24",
"revisionTime": "2017-05-12T15:20:54Z"
},
{
"checksumSHA1": "rpXu/2iiIGcK3KMKqXfko3g6rdk=",
"path": "github.com/opennota/check/cmd/aligncheck",
"revision": "11e2eec79ec4f789607e3efbf405cdca2504d4cb",
"revisionTime": "2017-04-02T03:17:31Z"
},
{
"checksumSHA1": "eWl/ySoMqPr+Q9p9smYNkTgXu2w=",
"path": "github.com/opennota/check/cmd/structcheck",
"revision": "86da7ade2cccfc1c5d6beeb55e5c65eba54f5f3c",
"revisionTime": "2018-01-21T06:50:09Z"
},
{
"checksumSHA1": "2NeV5byYMgK2g1GLWiqQWwt/OzE=",
"path": "github.com/opennota/check/cmd/varcheck",
"revision": "86da7ade2cccfc1c5d6beeb55e5c65eba54f5f3c",
"revisionTime": "2018-01-21T06:50:09Z"
},
{
"checksumSHA1": "F1IYMLBLAZaTOWnmXsgaxTGvrWI=",
"path": "github.com/pelletier/go-buffruneio",
"revision": "c37440a7cf42ac63b919c752ca73a85067e05992",
"revisionTime": "2017-02-27T22:03:11Z"
},
{
"checksumSHA1": "vHrGGP777P2fqQHr2IYwNVVRQ/o=",
"path": "github.com/pelletier/go-toml",
"revision": "fe7536c3dee2596cdd23ee9976a17c22bdaae286",
"revisionTime": "2017-06-02T06:55:32Z"
},
{
"checksumSHA1": "rJab1YdNhQooDiBWNnt7TLWPyBU=",
"path": "github.com/pkg/errors",
"revision": "c605e284fe17294bda444b34710735b29d1a9d90",
"revisionTime": "2017-05-05T04:36:39Z"
},
{
"checksumSHA1": "6JP37UqrI0H80Gpk0Y2P+KXgn5M=",
"path": "github.com/ryanuber/go-glob",
"revision": "256dc444b735e061061cf46c809487313d5b0065",
"revisionTime": "2017-01-28T01:21:29Z"
},
{
"checksumSHA1": "PMpzEhKo6usb71Qsby+a8uZMgBw=",
"path": "github.com/stripe/safesql",
"revision": "cddf355596fe2dbae05b4b5f845b4a6e2fb4e818",
"revisionTime": "2017-12-21T19:52:08Z"
},
{
"checksumSHA1": "9YtB2Xi9YK/scfhUOjgxmjoaqUw=",
"path": "github.com/tmthrgd/go-bindata",
"revision": "40f4993ede74f673cfe96bed75ef8513a389a00a",
"revisionTime": "2017-11-30T10:15:03Z"
},
{
"checksumSHA1": "JpZW4NtMSnXZ7T7rug7JEYgeHKc=",
"path": "github.com/tmthrgd/go-bindata/go-bindata",
"revision": "40f4993ede74f673cfe96bed75ef8513a389a00a",
"revisionTime": "2017-11-30T10:15:03Z"
},
{
"checksumSHA1": "/XExakIFq9PUOjkjlMpe7T/Ps+8=",
"path": "github.com/tmthrgd/go-bindata/internal/identifier",
"revision": "40f4993ede74f673cfe96bed75ef8513a389a00a",
"revisionTime": "2017-11-30T10:15:03Z"
},
{
"checksumSHA1": "fZaFaXc4iKu9PXl8xrmK3RrZpIY=",
"path": "github.com/tsenart/deadcode",
"revision": "210d2dc333e90c7e3eedf4f2242507a8e83ed4ab",
"revisionTime": "2016-07-24T21:28:37Z"
},
{
"checksumSHA1": "ih4CCYD19rjjF9fjid+l7w/+cIg=",
"path": "github.com/wadey/gocovmerge",
"revision": "b5bfa59ec0adc420475f97f89b58045c721d761c",
"revisionTime": "2016-03-31T18:18:00Z"
},
{
"checksumSHA1": "g27xFm/EIghjjcT3DuGt976CgNo=",
"path": "github.com/walle/lll",
"revision": "8b13b3fbf7312913fcfdbfa78997b9bd1dbb11af",
"revisionTime": "2016-07-02T15:04:58Z"
},
{
"checksumSHA1": "V74uq4M+82grbD85c6TQ3JyMCL4=",
"path": "github.com/walle/lll/cmd/lll",
"revision": "8b13b3fbf7312913fcfdbfa78997b9bd1dbb11af",
"revisionTime": "2016-07-02T15:04:58Z"
},
{
"checksumSHA1": "S32hhkopTwtHKbri0u4mwxV0UqQ=",
"path": "golang.org/x/lint",
"revision": "fb4f8c1d3a179654f93ef7e91d68fc7b1de6e88f",
"revisionTime": "2018-03-01T17:26:52Z"
},
{
"checksumSHA1": "PugQbLLjnbBSj+NOXRYBVRnLuuQ=",
"path": "golang.org/x/sys/unix",
"revision": "b90f89a1e7a9c1f6b918820b3daa7f08488c8594",
"revisionTime": "2017-05-29T13:44:53Z"
},
{
"checksumSHA1": "ziMb9+ANGRJSSIuxYdRbA+cDRBQ=",
"path": "golang.org/x/text/transform",
"revision": "ccbd3f7822129ff389f8ca4858a9b9d4d910531c",
"revisionTime": "2017-05-18T06:42:59Z"
},
{
"checksumSHA1": "aCXemG0knLp8YJedta7fYAIiX/8=",
"path": "golang.org/x/text/width",
"revision": "ccbd3f7822129ff389f8ca4858a9b9d4d910531c",
"revisionTime": "2017-05-18T06:42:59Z"
},
{
"checksumSHA1": "V4M/6A62nVBzPFxPbN+EAatCrVs=",
"path": "golang.org/x/tools/cmd/goimports",
"revision": "9f6d4ad827bbe70b5f5c8db2d3d279ea0a2767ad",
"revisionTime": "2018-02-17T07:00:07Z"
},
{
"checksumSHA1": "V6/A1ZOZ2GUOZcRWcXegtci2FoU=",
"path": "golang.org/x/tools/cmd/gotype",
"revision": "9f6d4ad827bbe70b5f5c8db2d3d279ea0a2767ad",
"revisionTime": "2018-02-17T07:00:07Z"
},
{
"checksumSHA1": "nD89PLkMqA5CakR8SoDuj3iQz1M=",
"path": "golang.org/x/tools/container/intsets",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "O5eYI3n1WdaC30AxQjETe3dAQHU=",
"path": "golang.org/x/tools/cover",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "p3gWsy4fQOSXGRMUHr3TnmVFias=",
"path": "golang.org/x/tools/go/ast/astutil",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "AnXFEvmaJ7w2Q7hWPcLUmCbPgq0=",
"path": "golang.org/x/tools/go/buildutil",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "9evbcWFxUJMFmnXQ2ja5765p3iE=",
"path": "golang.org/x/tools/go/callgraph",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "XAetbnZ2wmiJ68+j0am4Hp7K3j8=",
"path": "golang.org/x/tools/go/callgraph/cha",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "d/01nwqyc48GkZ3eqEOMszzTwBE=",
"path": "golang.org/x/tools/go/callgraph/rta",
"revision": "73e16cff9e0d4a802937444bebb562458548241d",
"revisionTime": "2018-02-27T16:02:18Z"
},
{
"checksumSHA1": "rSUfKH182TkCgMhJVsr84a19cbo=",
"path": "golang.org/x/tools/go/gcexportdata",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "o6uoZozSLnj3Ph+hj399ZPqJYhE=",
"path": "golang.org/x/tools/go/gcimporter15",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "3HnsDHAsl+izX3j9xpU6veKrWpk=",
"path": "golang.org/x/tools/go/loader",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "SzM7AWf+ZXc67vcI3jxvaD6iyM0=",
"path": "golang.org/x/tools/go/pointer",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "q0kKsRINLQjcGI4RVJ8//lmsHsc=",
"path": "golang.org/x/tools/go/ssa",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "FSjqqXMVKi4WoCqohpzt5z+6mMI=",
"path": "golang.org/x/tools/go/ssa/ssautil",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "QKvPv3TJ+ZnOLkUeUkT8Wm8eCV0=",
"path": "golang.org/x/tools/go/types/typeutil",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "TNJiDMoJEKYZyXo8Vkj37gqH5A0=",
"path": "golang.org/x/tools/go/vcs",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "DKr5TDU73FjKqiag3sAiVRKRfK0=",
"path": "golang.org/x/tools/imports",
"revision": "2a5864fcfb595b4ee9a7607f1beb25778cf64c6e",
"revisionTime": "2017-03-22T18:59:57Z"
},
{
"checksumSHA1": "1FcU7G3PX7GVBLOrtxtvDWVvImo=",
"path": "gopkg.in/alecthomas/kingpin.v3-unstable",
"revision": "bd961acaef2390fc48159c3acaad41ef31833920",
"revisionTime": "2017-05-21T07:44:49Z"
},
{
"checksumSHA1": "fALlQNY1fM99NesfLJ50KguWsio=",
"path": "gopkg.in/yaml.v2",
"revision": "cd8b52f8269e0feb286dfeef29f8fe4d5b397e0b",
"revisionTime": "2017-04-07T17:21:22Z"
},
{
"checksumSHA1": "FG8LnaSRTHBnrPHwa0zW4zX9K7M=",
"path": "honnef.co/go/tools/callgraph",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "fR7Q7BVwKHUEsUNGn6Q2zygAvTU=",
"path": "honnef.co/go/tools/callgraph/static",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "YL/UCzWYvDXeFInLOLC1buYve6w=",
"path": "honnef.co/go/tools/cmd/gosimple",
"revision": "8ed405e85c65fb38745a8eafe01ee9590523f172",
"revisionTime": "2018-01-10T22:45:03Z"
},
{
"checksumSHA1": "84jyAI0Uv1PQ3fN3Ufi0T7/IpOw=",
"path": "honnef.co/go/tools/cmd/megacheck",
"revision": "8ed405e85c65fb38745a8eafe01ee9590523f172",
"revisionTime": "2018-01-10T22:45:03Z"
},
{
"checksumSHA1": "dP4Ft0yiZSTZOzzNho1Gg5b7o2w=",
"path": "honnef.co/go/tools/cmd/staticcheck",
"revision": "8ed405e85c65fb38745a8eafe01ee9590523f172",
"revisionTime": "2018-01-10T22:45:03Z"
},
{
"checksumSHA1": "Qipy1/3Z8n4UnoWF9X0sQ/VC5JI=",
"path": "honnef.co/go/tools/cmd/unused",
"revision": "8ed405e85c65fb38745a8eafe01ee9590523f172",
"revisionTime": "2018-01-10T22:45:03Z"
},
{
"checksumSHA1": "smQXvyCgi0lsTRk7edZNx/z44rc=",
"path": "honnef.co/go/tools/deprecated",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "ZQAEQCc18o76M9Cyncm1W5cczJ8=",
"path": "honnef.co/go/tools/functions",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "NPXYxmyCQTv53OmGTCiHvbfIct4=",
"path": "honnef.co/go/tools/gcsizes",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90",
"revisionTime": "2017-05-22T19:09:05Z"
},
{
"checksumSHA1": "ZWtH73AO33mmXmK2RfGwld1/00I=",
"path": "honnef.co/go/tools/internal/sharedcheck",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "44oONKG61hcaBAPaA2jNhBgYLmE=",
"path": "honnef.co/go/tools/lint",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "gKJKwlFyfVebwzqA3P/N3HJIq/0=",
"path": "honnef.co/go/tools/lint/lintutil",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "kkVszwWx3L3erU3QkMDIppFv34o=",
"path": "honnef.co/go/tools/simple",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "OB5QjdkxC9rYXruXUuoYSsxK+VY=",
"path": "honnef.co/go/tools/ssa",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "jgNTrcXg52qlqjkb/R2vKxtcDu4=",
"path": "honnef.co/go/tools/ssa/ssautil",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "9kqdoLEm2gHS9QVE1OXWBCtRqhI=",
"path": "honnef.co/go/tools/staticcheck",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "9u74dwwwi+tg9eBr86by4i4CMNM=",
"path": "honnef.co/go/tools/staticcheck/vrp",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "tDBL3athXaJ9JoiY75NktH+OTjQ=",
"path": "honnef.co/go/tools/unused",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "RY0sZkXnDI/MxBauBD28dwuulSs=",
"path": "honnef.co/go/tools/version",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"path": "local/numcpus",
"revision": ""
},
{
"checksumSHA1": "FlLpgONxRMWkHp8H9c461RKJMhQ=",
"path": "mvdan.cc/interfacer",
"revision": "99221a8084d79b2e7419d4a6ddd9d8c7761eae6c",
"revisionTime": "2018-03-01T11:25:15Z"
},
{
"checksumSHA1": "0+bmt/m62xZSbyATqBbp1MTy6ZI=",
"path": "mvdan.cc/interfacer/check",
"revision": "d7e7372184a059b8fd99d96a593e3811bf989d75",
"revisionTime": "2017-09-08T18:13:45Z"
},
{
"checksumSHA1": "pCQUv3qVciM9V98kVNkOw1JWKzs=",
"path": "mvdan.cc/lint",
"revision": "adc824a0674b99099789b6188a058d485eaf61c0",
"revisionTime": "2017-09-08T18:12:59Z"
},
{
"checksumSHA1": "BX0SRkBmSo6WoyfZtcw4ympOsI8=",
"path": "mvdan.cc/unparam",
"revision": "0c3aec22d8e6d9b51a978b31539c51fd52071488",
"revisionTime": "2018-03-01T11:27:09Z"
},
{
"checksumSHA1": "aN6Bomg+fwd0GSfKYVgmPf0pd+I=",
"path": "mvdan.cc/unparam/check",
"revision": "0c3aec22d8e6d9b51a978b31539c51fd52071488",
"revisionTime": "2018-03-01T11:27:09Z"
}
],
"rootPath": "github.com/wrouesnel/postgres_exporter/tools"
}

357
vendor/vendor.json vendored Normal file
View File

@ -0,0 +1,357 @@
{
"comment": "",
"ignore": "test",
"package": [
{
"checksumSHA1": "KmjnydoAbofMieIWm+it5OWERaM=",
"path": "github.com/alecthomas/template",
"revision": "a0175ee3bccc567396460bf5acd36800cb10c49c",
"revisionTime": "2016-04-05T07:15:01Z"
},
{
"checksumSHA1": "3wt0pTXXeS+S93unwhGoLIyGX/Q=",
"path": "github.com/alecthomas/template/parse",
"revision": "a0175ee3bccc567396460bf5acd36800cb10c49c",
"revisionTime": "2016-04-05T07:15:01Z"
},
{
"checksumSHA1": "fCc3grA7vIxfBru7R3SqjcW+oLI=",
"path": "github.com/alecthomas/units",
"revision": "2efee857e7cfd4f3d0138cc3cbb1b4966962b93a",
"revisionTime": "2015-10-22T06:55:26Z"
},
{
"checksumSHA1": "spyv5/YFBjYyZLZa1U2LBfDR8PM=",
"path": "github.com/beorn7/perks/quantile",
"revision": "4c0e84591b9aa9e6dcfdf3e020114cd81f89d5f9",
"revisionTime": "2016-08-04T10:47:26Z"
},
{
"checksumSHA1": "OT4XN9z5k69e2RsMSpwW74B+yk4=",
"path": "github.com/blang/semver",
"revision": "2ee87856327ba09384cabd113bc6b5d174e9ec0f",
"revisionTime": "2017-07-27T06:48:18Z"
},
{
"checksumSHA1": "92dnVWesQCC1xueK1Du/6c+yLOk=",
"path": "github.com/dsnet/compress",
"revision": "cc9eb1d7ad760af14e8f918698f745e80377af4f",
"revisionTime": "2017-12-08T18:51:09Z"
},
{
"checksumSHA1": "Q8Y8aBNAuiO4/HVyj9PRyBz50YM=",
"path": "github.com/dsnet/compress/bzip2",
"revision": "cc9eb1d7ad760af14e8f918698f745e80377af4f",
"revisionTime": "2017-12-08T18:51:09Z"
},
{
"checksumSHA1": "rUK6wJzSweagbKHcRUU1TWkQq/0=",
"path": "github.com/dsnet/compress/bzip2/internal/sais",
"revision": "cc9eb1d7ad760af14e8f918698f745e80377af4f",
"revisionTime": "2017-12-08T18:51:09Z"
},
{
"checksumSHA1": "u6VJ7jTVulLgPZaXKWCIHc4hbQs=",
"path": "github.com/dsnet/compress/internal",
"revision": "cc9eb1d7ad760af14e8f918698f745e80377af4f",
"revisionTime": "2017-12-08T18:51:09Z"
},
{
"checksumSHA1": "KDfyyvx86cyY/HUA2SSWRWjn7yI=",
"path": "github.com/dsnet/compress/internal/errors",
"revision": "cc9eb1d7ad760af14e8f918698f745e80377af4f",
"revisionTime": "2017-12-08T18:51:09Z"
},
{
"checksumSHA1": "Txyi+DYhWRT65KnJokyQWB2xj3A=",
"path": "github.com/dsnet/compress/internal/prefix",
"revision": "cc9eb1d7ad760af14e8f918698f745e80377af4f",
"revisionTime": "2017-12-08T18:51:09Z"
},
{
"checksumSHA1": "yqF125xVSkmfLpIVGrLlfE05IUk=",
"path": "github.com/golang/protobuf/proto",
"revision": "1643683e1b54a9e88ad26d98f81400c8c9d9f4f9",
"revisionTime": "2017-10-21T04:39:52Z"
},
{
"checksumSHA1": "p/8vSviYF91gFflhrt5vkyksroo=",
"path": "github.com/golang/snappy",
"revision": "553a641470496b2327abcac10b36396bd98e45c9",
"revisionTime": "2017-02-15T23:32:05Z"
},
{
"checksumSHA1": "GKTFbGomCP1fhH7mFecvwKvh7bc=",
"path": "github.com/lib/pq",
"revision": "78223426e7c66d631117c0a9da1b7f3fde4d23a5",
"revisionTime": "2019-08-13T06:55:22Z"
},
{
"checksumSHA1": "AU3fA8Sm33Vj9PBoRPSeYfxLRuE=",
"path": "github.com/lib/pq/oid",
"revision": "b609790bd85edf8e9ab7e0f8912750a786177bcf",
"revisionTime": "2017-10-22T19:20:43Z"
},
{
"checksumSHA1": "n0MMCrKKsQuuhv7vLsrtRUGJVA8=",
"path": "github.com/lib/pq/scram",
"revision": "78223426e7c66d631117c0a9da1b7f3fde4d23a5",
"revisionTime": "2019-08-13T06:55:22Z"
},
{
"checksumSHA1": "k3e1TD8wrhxfUUG3pQBb10ppNGA=",
"path": "github.com/magefile/mage",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "KODorM0Am1g55qObNz3jVOdRVFs=",
"path": "github.com/magefile/mage/build",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "jdM6DuMtXKrl42m0pM/1YOAPkxc=",
"path": "github.com/magefile/mage/mage",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "TkAemcxaY44gsEjO1BiBxwlEI4A=",
"path": "github.com/magefile/mage/mg",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "b1qY9BFtpJnIZEa8yvpJCRbOhRM=",
"path": "github.com/magefile/mage/parse",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "fEuDveZzYX6oqYOT9jqyZROun/Q=",
"path": "github.com/magefile/mage/parse/srcimporter",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "0/j3qlGc8fsWG42uIDZ5p8tVzPM=",
"path": "github.com/magefile/mage/sh",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "oAjx69UIs6F6hPh+2GQSBMaHAfc=",
"path": "github.com/magefile/mage/target",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "He+VtZO7BsPDCZhZtJ1IkNp629o=",
"path": "github.com/magefile/mage/types",
"revision": "81dbe7074be509fcdc5e496481a8e01276332745",
"revisionTime": "2018-02-12T16:24:26Z"
},
{
"checksumSHA1": "bKMZjd2wPw13VwoE7mBeSv5djFA=",
"path": "github.com/matttproud/golang_protobuf_extensions/pbutil",
"revision": "c12348ce28de40eed0136aa2b644d0ee0650e56c",
"revisionTime": "2016-04-24T11:30:07Z"
},
{
"checksumSHA1": "VqPwpjQKzPYZcTkqZOIk8b+gYqI=",
"path": "github.com/mholt/archiver",
"revision": "26cf5bb32d07aa4e8d0de15f56ce516f4641d7df",
"revisionTime": "2017-10-12T05:23:41Z"
},
{
"checksumSHA1": "rz0k2HRJ9gx11wt/gqATiRd2qz8=",
"path": "github.com/nwaples/rardecode",
"revision": "e06696f847aeda6f39a8f0b7cdff193b7690aef6",
"revisionTime": "2017-03-13T01:07:58Z"
},
{
"checksumSHA1": "xKzx54LbkghuMauevGWevn5ip3w=",
"path": "github.com/pierrec/lz4",
"revision": "ed8d4cc3b461464e69798080a0092bd028910298",
"revisionTime": "2018-01-13T15:17:03Z"
},
{
"checksumSHA1": "zPWRjzsPeXCoqmidIcJtHbvrvRs=",
"path": "github.com/pierrec/xxHash/xxHash32",
"revision": "a0006b13c722f7f12368c00a3d3c2ae8a999a0c6",
"revisionTime": "2017-07-14T08:24:55Z"
},
{
"checksumSHA1": "5dHjKxShYVWVB1Fb00dAnR6kqVk=",
"path": "github.com/prometheus/client_golang/prometheus",
"revision": "2641b987480bca71fb39738eb8c8b0d577cb1d76",
"revisionTime": "2019-06-07T14:56:44Z",
"version": "v0.9.4",
"versionExact": "v0.9.4"
},
{
"checksumSHA1": "UBqhkyjCz47+S19MVTigxJ2VjVQ=",
"path": "github.com/prometheus/client_golang/prometheus/internal",
"revision": "2641b987480bca71fb39738eb8c8b0d577cb1d76",
"revisionTime": "2019-06-07T14:56:44Z",
"version": "v0.9.4",
"versionExact": "v0.9.4"
},
{
"checksumSHA1": "V51yx4gq61QCD9clxnps792Eq2Y=",
"path": "github.com/prometheus/client_golang/prometheus/promhttp",
"revision": "2641b987480bca71fb39738eb8c8b0d577cb1d76",
"revisionTime": "2019-06-07T14:56:44Z",
"version": "v0.9.4",
"versionExact": "v0.9.4"
},
{
"checksumSHA1": "DvwvOlPNAgRntBzt3b3OSRMS2N4=",
"path": "github.com/prometheus/client_model/go",
"revision": "6f3806018612930941127f2a7c6c453ba2c527d2",
"revisionTime": "2017-02-16T18:52:47Z"
},
{
"checksumSHA1": "vA545Z9FkjGvIHBTAKQOE0nap/k=",
"path": "github.com/prometheus/common/expfmt",
"revision": "287d3e634a1e550c9e463dd7e5a75a422c614505",
"revisionTime": "2019-09-13T08:39:41Z",
"version": "v0.7.0",
"versionExact": "v0.7.0"
},
{
"checksumSHA1": "1Mhfofk+wGZ94M0+Bd98K8imPD4=",
"path": "github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg",
"revision": "287d3e634a1e550c9e463dd7e5a75a422c614505",
"revisionTime": "2019-09-13T08:39:41Z",
"version": "v0.7.0",
"versionExact": "v0.7.0"
},
{
"checksumSHA1": "MGnqHnmEqc1fjnYiWReSiW8C27A=",
"path": "github.com/prometheus/common/log",
"revision": "287d3e634a1e550c9e463dd7e5a75a422c614505",
"revisionTime": "2019-09-13T08:39:41Z",
"version": "v0.7.0",
"versionExact": "v0.7.0"
},
{
"checksumSHA1": "ccmMs+h9Jo8kE7izqsUkWShD4d0=",
"path": "github.com/prometheus/common/model",
"revision": "287d3e634a1e550c9e463dd7e5a75a422c614505",
"revisionTime": "2019-09-13T08:39:41Z",
"version": "v0.7.0",
"versionExact": "v0.7.0"
},
{
"checksumSHA1": "91KYK0SpvkaMJJA2+BcxbVnyRO0=",
"path": "github.com/prometheus/common/version",
"revision": "287d3e634a1e550c9e463dd7e5a75a422c614505",
"revisionTime": "2019-09-13T08:39:41Z",
"version": "v0.7.0",
"versionExact": "v0.7.0"
},
{
"checksumSHA1": "WB7dFqkmD3R514xql9YM3ZP1dDM=",
"path": "github.com/prometheus/procfs",
"revision": "833678b5bb319f2d20a475cb165c6cc59c2cc77c",
"revisionTime": "2019-05-31T16:30:47Z",
"version": "v0.0.2",
"versionExact": "v0.0.2"
},
{
"checksumSHA1": "Kmjs49lbjGmlgUPx3pks0tVDed0=",
"path": "github.com/prometheus/procfs/internal/fs",
"revision": "65bdadfa96aecebf4dcf888da995a29eab4fc964",
"revisionTime": "2019-05-28T16:49:32Z",
"version": "v0.0.1",
"versionExact": "v0.0.1"
},
{
"checksumSHA1": "BYvROBsiyAXK4sq6yhDe8RgT4LM=",
"path": "github.com/sirupsen/logrus",
"revision": "89742aefa4b206dcf400792f3bd35b542998eb3b",
"revisionTime": "2017-08-22T13:27:46Z"
},
{
"checksumSHA1": "2CJmLcvYL6KW7gp2xaSdorR4i54=",
"path": "github.com/tmthrgd/go-bindata/restore",
"revision": "40f4993ede74f673cfe96bed75ef8513a389a00a",
"revisionTime": "2017-11-30T10:15:03Z"
},
{
"checksumSHA1": "qgMa75aMGbkFY0jIqqqgVnCUoNA=",
"path": "github.com/ulikunitz/xz",
"revision": "0c6b41e72360850ca4f98dc341fd999726ea007f",
"revisionTime": "2017-06-05T21:53:11Z"
},
{
"checksumSHA1": "vjnTkzNrMs5Xj6so/fq0mQ6dT1c=",
"path": "github.com/ulikunitz/xz/internal/hash",
"revision": "0c6b41e72360850ca4f98dc341fd999726ea007f",
"revisionTime": "2017-06-05T21:53:11Z"
},
{
"checksumSHA1": "m0pm57ASBK/CTdmC0ppRHO17mBs=",
"path": "github.com/ulikunitz/xz/internal/xlog",
"revision": "0c6b41e72360850ca4f98dc341fd999726ea007f",
"revisionTime": "2017-06-05T21:53:11Z"
},
{
"checksumSHA1": "2vZw6zc8xuNlyVz2QKvdlNSZQ1U=",
"path": "github.com/ulikunitz/xz/lzma",
"revision": "0c6b41e72360850ca4f98dc341fd999726ea007f",
"revisionTime": "2017-06-05T21:53:11Z"
},
{
"checksumSHA1": "nqWNlnMmVpt628zzvyo6Yv2CX5Q=",
"path": "golang.org/x/crypto/ssh/terminal",
"revision": "2509b142fb2b797aa7587dad548f113b2c0f20ce",
"revisionTime": "2017-10-23T14:45:55Z"
},
{
"checksumSHA1": "ftE54xFDY2r5NPWskbK88BQPrb4=",
"path": "golang.org/x/sys/unix",
"revision": "a1a1f1746d156bbc9954f29134b20ed4ce2752f1",
"revisionTime": "2017-10-23T12:30:29Z"
},
{
"checksumSHA1": "wGkVl9xZjgnLs/olurjDX2Yg8Xw=",
"path": "golang.org/x/sys/windows",
"revision": "a1a1f1746d156bbc9954f29134b20ed4ce2752f1",
"revisionTime": "2017-10-23T12:30:29Z"
},
{
"checksumSHA1": "ZdFZFaXmCgEEaEhVPkyXrnhKhsg=",
"path": "golang.org/x/sys/windows/registry",
"revision": "a1a1f1746d156bbc9954f29134b20ed4ce2752f1",
"revisionTime": "2017-10-23T12:30:29Z"
},
{
"checksumSHA1": "uVlUSSKplihZG7N+QJ6fzDZ4Kh8=",
"path": "golang.org/x/sys/windows/svc/eventlog",
"revision": "a1a1f1746d156bbc9954f29134b20ed4ce2752f1",
"revisionTime": "2017-10-23T12:30:29Z"
},
{
"checksumSHA1": "3SZTatHIy9OTKc95YlVfXKnoySg=",
"path": "gopkg.in/alecthomas/kingpin.v2",
"revision": "1087e65c9441605df944fb12c33f0fe7072d18ca",
"revisionTime": "2017-07-27T04:22:29Z"
},
{
"checksumSHA1": "CEFTYXtWmgSh+3Ik1NmDaJcz4E0=",
"path": "gopkg.in/check.v1",
"revision": "20d25e2804050c1cd24a7eea1e7a6447dd0e74ec",
"revisionTime": "2016-12-08T18:13:25Z"
},
{
"checksumSHA1": "RDJpJQwkF012L6m/2BJizyOksNw=",
"path": "gopkg.in/yaml.v2",
"revision": "eb3733d160e74a9c7e442f435eb3bea458e1d19f",
"revisionTime": "2017-08-12T16:00:11Z"
}
],
"rootPath": "github.com/wrouesnel/postgres_exporter"
}