main.go
package main

import (
	"net/http"

	"greenplum-exporter/collector"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	logger "github.com/prometheus/common/log"
	"gopkg.in/alecthomas/kingpin.v2"
)
/**
 * Reference tutorial: https://www.cnblogs.com/momoyan/p/9943268.html
 * Official docs: https://godoc.org/github.com/prometheus/client_golang/prometheus
 * Official docs: https://gp-docs-cn.github.io/docs/admin_guide/monitoring/monitoring.html
 */
// Command-line flags.
var (
	listenAddress         = kingpin.Flag("web.listen-address", "Address on which to expose metrics.").Default("0.0.0.0:9297").String()
	metricPath            = kingpin.Flag("web.telemetry-path", "Path under which to expose metrics.").Default("/metrics").String()
	disableDefaultMetrics = kingpin.Flag("disableDefaultMetrics", "Do not report default metrics (Go runtime and process metrics).").Default("true").Bool()
)
// scrapers maps every available scraper to whether it is enabled by default.
var scrapers = map[collector.Scraper]bool{
	collector.NewLocksScraper():         true,
	collector.NewClusterStateScraper():  true,
	collector.NewDatabaseSizeScraper():  true,
	collector.NewConnectionsScraper():   true,
	collector.NewMaxConnScraper():       true,
	collector.NewSegmentScraper():       true,
	collector.NewConnDetailScraper():    true,
	collector.NewUsersScraper():         true,
	collector.NewBgWriterStateScraper(): true,
	collector.NewSystemScraper():        false,
	collector.NewQueryScraper():         false,
	collector.NewDynamicMemoryScraper(): false,
	collector.NewDiskScraper():          false,
}
// gathers holds the metric gatherers served by the HTTP handler.
var gathers prometheus.Gatherers
func main() {
	kingpin.Version("1.1.1")
	kingpin.HelpFlag.Short('h')
	logger.AddFlags(kingpin.CommandLine)
	kingpin.Parse()

	// Register the metrics handler and start the HTTP server.
	metricsHandleFunc := newHandler(*disableDefaultMetrics, scrapers)
	mux := http.NewServeMux()
	mux.HandleFunc(*metricPath, metricsHandleFunc)

	logger.Warnf("Greenplum exporter is starting and will listen on: %s", *listenAddress)
	logger.Error(http.ListenAndServe(*listenAddress, mux).Error())
}
// newHandler builds the /metrics HTTP handler: it registers the enabled
// scrapers with a dedicated registry and, unless disabled, also exposes the
// default Go runtime and process metrics.
func newHandler(disableDefaultMetrics bool, scrapers map[collector.Scraper]bool) http.HandlerFunc {
	registry := prometheus.NewRegistry()

	enabledScrapers := make([]collector.Scraper, 0, 16)
	for scraper, enable := range scrapers {
		if enable {
			enabledScrapers = append(enabledScrapers, scraper)
		}
	}

	greenPlumCollector := collector.NewCollector(enabledScrapers)
	registry.MustRegister(greenPlumCollector)

	if disableDefaultMetrics {
		gathers = prometheus.Gatherers{registry}
	} else {
		gathers = prometheus.Gatherers{registry, prometheus.DefaultGatherer}
	}

	handler := promhttp.HandlerFor(gathers, promhttp.HandlerOpts{
		ErrorHandling: promhttp.ContinueOnError,
	})
	return handler.ServeHTTP
}
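
The collector package referenced above is not shown on this page. The sketch below is only an illustration of the contract main.go appears to rely on: NewCollector must accept the enabled scrapers and return a prometheus.Collector, and each New*Scraper constructor must return a value satisfying a Scraper interface. The method names and the Scrape signature are assumptions for illustration, not the actual greenplum-exporter API.

// Hypothetical sketch of the collector package contract assumed by main.go.
// Only NewCollector and the Scraper type name are taken from main.go; the
// method names and signatures below are illustrative guesses.
package collector

import (
	"database/sql"

	"github.com/prometheus/client_golang/prometheus"
)

// Scraper is a single unit of metric collection that main.go can enable or disable.
type Scraper interface {
	// Name identifies the scraper in logs and error messages.
	Name() string
	// Scrape reads from the Greenplum master database and sends metrics on ch.
	Scrape(db *sql.DB, ch chan<- prometheus.Metric) error
}

// NewCollector wraps the enabled scrapers in a prometheus.Collector so it can
// be registered with the registry in newHandler.
func NewCollector(scrapers []Scraper) prometheus.Collector {
	return &greenplumCollector{scrapers: scrapers}
}

type greenplumCollector struct {
	scrapers []Scraper
}

// Describe and Collect satisfy prometheus.Collector. A real implementation
// would open the database connection and fan out to every scraper.
func (c *greenplumCollector) Describe(ch chan<- *prometheus.Desc) {}

func (c *greenplumCollector) Collect(ch chan<- prometheus.Metric) {
	for _, s := range c.scrapers {
		_ = s // placeholder: a real Collect would call s.Scrape(db, ch)
	}
}

Under a contract like this, adding a new metric group is a matter of implementing Scraper and adding its constructor to the scrapers map in main.go.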