parent 66ecc420ef
commit 5fbde88919
@@ -1,22 +0,0 @@
# Copyright 2013 The Prometheus Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

all: generated/config.pb.go

SUFFIXES:

include ../Makefile.INCLUDE

generated/config.pb.go: config.proto
	go get github.com/golang/protobuf/protoc-gen-go
	$(PROTOC) --proto_path=$(PREFIX)/include:. --go_out=generated/ config.proto
@@ -1,269 +1,359 @@
||||
// Copyright 2013 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package config |
||||
|
||||
import ( |
||||
"errors" |
||||
"fmt" |
||||
"io/ioutil" |
||||
"regexp" |
||||
"strings" |
||||
"time" |
||||
|
||||
"github.com/golang/protobuf/proto" |
||||
"gopkg.in/yaml.v2" |
||||
|
||||
clientmodel "github.com/prometheus/client_golang/model" |
||||
|
||||
"github.com/prometheus/prometheus/utility" |
||||
|
||||
pb "github.com/prometheus/prometheus/config/generated" |
||||
) |
||||
|
||||
var ( |
||||
jobNameRE = regexp.MustCompile("^[a-zA-Z_][a-zA-Z0-9_-]*$") |
||||
labelNameRE = regexp.MustCompile("^[a-zA-Z_][a-zA-Z0-9_]*$") |
||||
) |
||||
var jobNameRE = regexp.MustCompile("^[a-zA-Z_][a-zA-Z0-9_-]*$") |
||||
|
||||
// Config encapsulates the configuration of a Prometheus instance. It wraps the
|
||||
// raw configuration protocol buffer to be able to add custom methods to it.
|
||||
type Config struct { |
||||
// The protobuf containing the actual configuration values.
|
||||
pb.PrometheusConfig |
||||
// Load parses the YAML input s into a Config.
|
||||
func Load(s string) (*Config, error) { |
||||
cfg := &Config{ |
||||
original: s, |
||||
} |
||||
err := yaml.Unmarshal([]byte(s), cfg) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
return cfg, nil |
||||
} |
||||
|
||||
// String returns an ASCII serialization of the loaded configuration protobuf.
|
||||
func (c *Config) String() string { |
||||
return proto.MarshalTextString(&c.PrometheusConfig) |
||||
// LoadFromFile parses the given YAML file into a Config.
|
||||
func LoadFromFile(filename string) (*Config, error) { |
||||
content, err := ioutil.ReadFile(filename) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
return Load(string(content)) |
||||
} |
||||
|
||||
// validateLabels validates whether label names have the correct format.
|
||||
func validateLabels(labels *pb.LabelPairs) error { |
||||
if labels == nil { |
||||
return nil |
||||
// The defaults applied before parsing the respective config sections.
|
||||
var ( |
||||
// The default top-level configuration.
|
||||
DefaultConfig = DefaultedConfig{ |
||||
GlobalConfig: &GlobalConfig{DefaultGlobalConfig}, |
||||
} |
||||
for _, label := range labels.Label { |
||||
if !labelNameRE.MatchString(label.GetName()) { |
||||
return fmt.Errorf("invalid label name '%s'", label.GetName()) |
||||
} |
||||
|
||||
// The default global configuration.
|
||||
DefaultGlobalConfig = DefaultedGlobalConfig{ |
||||
ScrapeInterval: Duration(10 * time.Second), |
||||
ScrapeTimeout: Duration(10 * time.Second), |
||||
EvaluationInterval: Duration(1 * time.Minute), |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// validateHosts validates whether a target group contains valid hosts.
|
||||
func validateHosts(hosts []string) error { |
||||
if hosts == nil { |
||||
return nil |
||||
// The default scrape configuration.
|
||||
DefaultScrapeConfig = DefaultedScrapeConfig{ |
||||
// ScrapeTimeout and ScrapeInterval default to the
|
||||
// configured globals.
|
||||
MetricsPath: "/metrics", |
||||
Scheme: "http", |
||||
} |
||||
for _, host := range hosts { |
||||
// Make sure that this does not contain any paths or schemes.
|
||||
// This ensures that old configurations error.
|
||||
if strings.Contains(host, "/") { |
||||
return fmt.Errorf("invalid host '%s', no schemes or paths allowed", host) |
||||
} |
||||
|
||||
// The default Relabel configuration.
|
||||
DefaultRelabelConfig = DefaultedRelabelConfig{ |
||||
Action: RelabelReplace, |
||||
Separator: ";", |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// Validate checks an entire parsed Config for the validity of its fields.
|
||||
func (c *Config) Validate() error { |
||||
// Check the global configuration section for validity.
|
||||
global := c.Global |
||||
if _, err := utility.StringToDuration(global.GetScrapeInterval()); err != nil { |
||||
return fmt.Errorf("invalid global scrape interval: %s", err) |
||||
// The default DNS SD configuration.
|
||||
DefaultDNSConfig = DefaultedDNSConfig{ |
||||
RefreshInterval: Duration(30 * time.Second), |
||||
} |
||||
if _, err := utility.StringToDuration(global.GetEvaluationInterval()); err != nil { |
||||
return fmt.Errorf("invalid rule evaluation interval: %s", err) |
||||
) |
||||
|
||||
// Config is the top-level configuration for Prometheus's config files.
|
||||
type Config struct { |
||||
// DefaultedConfig contains the actual fields of Config.
|
||||
DefaultedConfig `yaml:",inline"` |
||||
|
||||
// original is the input from which the config was parsed.
|
||||
original string |
||||
} |
||||
|
||||
func (c *Config) String() string { |
||||
if c.original != "" { |
||||
return c.original |
||||
} |
||||
if err := validateLabels(global.Labels); err != nil { |
||||
return fmt.Errorf("invalid global labels: %s", err) |
||||
b, err := yaml.Marshal(c) |
||||
if err != nil { |
||||
return fmt.Sprintf("<error creating config string: %s>", err) |
||||
} |
||||
return string(b) |
||||
} |
||||
|
||||
// Check each scrape configuration for validity.
|
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
c.DefaultedConfig = DefaultConfig |
||||
if err := unmarshal(&c.DefaultedConfig); err != nil { |
||||
return err |
||||
} |
||||
// Do global overrides and validate unique names.
|
||||
jobNames := map[string]struct{}{} |
||||
for _, sc := range c.ScrapeConfigs() { |
||||
name := sc.GetJobName() |
||||
|
||||
if _, ok := jobNames[name]; ok { |
||||
return fmt.Errorf("found multiple scrape configs configured with the same job name: %q", name) |
||||
for _, scfg := range c.ScrapeConfigs { |
||||
if scfg.ScrapeInterval == 0 { |
||||
scfg.ScrapeInterval = c.GlobalConfig.ScrapeInterval |
||||
} |
||||
if scfg.ScrapeTimeout == 0 { |
||||
scfg.ScrapeTimeout = c.GlobalConfig.ScrapeTimeout |
||||
} |
||||
jobNames[name] = struct{}{} |
||||
|
||||
if err := sc.Validate(); err != nil { |
||||
return fmt.Errorf("error in scrape config %q: %s", name, err) |
||||
if _, ok := jobNames[scfg.JobName]; ok { |
||||
return fmt.Errorf("found multiple scrape configs with job name %q", scfg.JobName) |
||||
} |
||||
jobNames[scfg.JobName] = struct{}{} |
||||
} |
||||
|
||||
return nil |
||||
} |
||||
|
||||
// GlobalLabels returns the global labels as a LabelSet.
|
||||
func (c *Config) GlobalLabels() clientmodel.LabelSet { |
||||
labels := clientmodel.LabelSet{} |
||||
if c.Global != nil && c.Global.Labels != nil { |
||||
for _, label := range c.Global.Labels.Label { |
||||
labels[clientmodel.LabelName(label.GetName())] = clientmodel.LabelValue(label.GetValue()) |
||||
} |
||||
} |
||||
return labels |
||||
// DefaultedConfig is a proxy type for Config.
|
||||
type DefaultedConfig struct { |
||||
GlobalConfig *GlobalConfig `yaml:"global_config"` |
||||
RuleFiles []string `yaml:"rule_files,omitempty"` |
||||
ScrapeConfigs []*ScrapeConfig `yaml:"scrape_configs,omitempty"` |
||||
} |
||||
|
||||
// ScrapeConfigs returns all scrape configurations.
|
||||
func (c *Config) ScrapeConfigs() (cfgs []*ScrapeConfig) { |
||||
for _, sc := range c.GetScrapeConfig() { |
||||
cfgs = append(cfgs, &ScrapeConfig{*sc}) |
||||
} |
||||
return |
||||
// GlobalConfig configures values that are used across other configuration
|
||||
// objects.
|
||||
type GlobalConfig struct { |
||||
// DefaultedGlobalConfig contains the actual fields for GlobalConfig.
|
||||
DefaultedGlobalConfig `yaml:",inline"` |
||||
} |
||||
|
||||
// stringToDuration converts a string to a duration and dies on invalid format.
|
||||
func stringToDuration(intervalStr string) time.Duration { |
||||
duration, err := utility.StringToDuration(intervalStr) |
||||
if err != nil { |
||||
panic(err) |
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (c *GlobalConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
c.DefaultedGlobalConfig = DefaultGlobalConfig |
||||
if err := unmarshal(&c.DefaultedGlobalConfig); err != nil { |
||||
return err |
||||
} |
||||
return duration |
||||
} |
||||
|
||||
// ScrapeInterval gets the default scrape interval for a Config.
|
||||
func (c *Config) ScrapeInterval() time.Duration { |
||||
return stringToDuration(c.Global.GetScrapeInterval()) |
||||
return nil |
||||
} |
||||
|
||||
// EvaluationInterval gets the default evaluation interval for a Config.
|
||||
func (c *Config) EvaluationInterval() time.Duration { |
||||
return stringToDuration(c.Global.GetEvaluationInterval()) |
||||
// DefaultedGlobalConfig is a proxy type for GlobalConfig.
|
||||
type DefaultedGlobalConfig struct { |
||||
// How frequently to scrape targets by default.
|
||||
ScrapeInterval Duration `yaml:"scrape_interval"` |
||||
// The default timeout when scraping targets.
|
||||
ScrapeTimeout Duration `yaml:"scrape_timeout"` |
||||
// How frequently to evaluate rules by default.
|
||||
EvaluationInterval Duration `yaml:"evaluation_interval"` |
||||
|
||||
// The labels to add to any timeseries that this Prometheus instance scrapes.
|
||||
Labels clientmodel.LabelSet `yaml:"labels,omitempty"` |
||||
} |
||||
|
||||
// ScrapeConfig encapsulates a protobuf scrape configuration.
|
||||
// ScrapeConfig configures a scraping unit for Prometheus.
|
||||
type ScrapeConfig struct { |
||||
pb.ScrapeConfig |
||||
// DefaultedScrapeConfig contains the actual fields for ScrapeConfig.
|
||||
DefaultedScrapeConfig `yaml:",inline"` |
||||
} |
||||
|
||||
// ScrapeInterval gets the scrape interval for the scrape config.
|
||||
func (c *ScrapeConfig) ScrapeInterval() time.Duration { |
||||
return stringToDuration(c.GetScrapeInterval()) |
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (c *ScrapeConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
c.DefaultedScrapeConfig = DefaultScrapeConfig |
||||
err := unmarshal(&c.DefaultedScrapeConfig) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
if !jobNameRE.MatchString(c.JobName) { |
||||
return fmt.Errorf("%q is not a valid job name", c.JobName) |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// ScrapeTimeout gets the scrape timeout for the scrape config.
|
||||
func (c *ScrapeConfig) ScrapeTimeout() time.Duration { |
||||
return stringToDuration(c.GetScrapeTimeout()) |
||||
// DefaultedScrapeConfig is a proxy type for ScrapeConfig.
|
||||
type DefaultedScrapeConfig struct { |
||||
// The job name to which the job label is set by default.
|
||||
JobName string `yaml:"job_name"` |
||||
// How frequently to scrape the targets of this scrape config.
|
||||
ScrapeInterval Duration `yaml:"scrape_interval"` |
||||
// The timeout for scraping targets of this config.
|
||||
ScrapeTimeout Duration `yaml:"scrape_timeout"` |
||||
// The HTTP resource path on which to fetch metrics from targets.
|
||||
MetricsPath string `yaml:"metrics_path"` |
||||
// The URL scheme with which to fetch metrics from targets.
|
||||
Scheme string `yaml:"scheme"` |
||||
|
||||
// List of labeled target groups for this job.
|
||||
TargetGroups []*TargetGroup `yaml:"target_groups,omitempty"` |
||||
// List of DNS service discovery configurations.
|
||||
DNSConfigs []*DNSConfig `yaml:"dns_configs,omitempty"` |
||||
// List of relabel configurations.
|
||||
RelabelConfigs []*RelabelConfig `yaml:"relabel_configs,omitempty"` |
||||
} |
||||
|
||||
// Labels returns a label set for the targets that is implied by the scrape config.
|
||||
func (c *ScrapeConfig) Labels() clientmodel.LabelSet { |
||||
return clientmodel.LabelSet{ |
||||
clientmodel.MetricsPathLabel: clientmodel.LabelValue(c.GetMetricsPath()), |
||||
clientmodel.JobLabel: clientmodel.LabelValue(c.GetJobName()), |
||||
} |
||||
// A labeled group of targets to scrape for a job.
|
||||
type TargetGroup struct { |
||||
// Targets is a list of targets identified by a label set. Each target is
|
||||
// uniquely identifiable in the group by its address label.
|
||||
Targets []clientmodel.LabelSet `yaml:"targets,omitempty" json:"targets,omitempty"` |
||||
// Labels is a set of labels that is common across all targets in the group.
|
||||
Labels clientmodel.LabelSet `yaml:"labels,omitempty" json:"labels,omitempty"` |
||||
|
||||
// Source is an identifier that describes a group of targets.
|
||||
Source string `yaml:"-" json:"-"` |
||||
} |
||||
|
||||
// Validate checks the ScrapeConfig for the validity of its fields.
|
||||
func (c *ScrapeConfig) Validate() error { |
||||
name := c.GetJobName() |
||||
func (tg *TargetGroup) String() string { |
||||
return tg.Source |
||||
} |
||||
|
||||
if !jobNameRE.MatchString(name) { |
||||
return fmt.Errorf("invalid job name %q", name) |
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (tg *TargetGroup) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
g := struct { |
||||
Targets []string `yaml:"targets"` |
||||
Labels clientmodel.LabelSet `yaml:"labels"` |
||||
}{} |
||||
if err := unmarshal(&g); err != nil { |
||||
return err |
||||
} |
||||
if _, err := utility.StringToDuration(c.GetScrapeInterval()); err != nil { |
||||
return fmt.Errorf("invalid scrape interval: %s", err) |
||||
} |
||||
if _, err := utility.StringToDuration(c.GetScrapeTimeout()); err != nil { |
||||
return fmt.Errorf("invalid scrape timeout: %s", err) |
||||
} |
||||
for _, tgroup := range c.GetTargetGroup() { |
||||
if err := validateLabels(tgroup.Labels); err != nil { |
||||
return fmt.Errorf("invalid labels: %s", err) |
||||
} |
||||
if err := validateHosts(tgroup.Target); err != nil { |
||||
return fmt.Errorf("invalid targets: %s", err) |
||||
tg.Targets = make([]clientmodel.LabelSet, 0, len(g.Targets)) |
||||
for _, t := range g.Targets { |
||||
if strings.Contains(t, "/") { |
||||
return fmt.Errorf("%q is not a valid hostname", t) |
||||
} |
||||
tg.Targets = append(tg.Targets, clientmodel.LabelSet{ |
||||
clientmodel.AddressLabel: clientmodel.LabelValue(t), |
||||
}) |
||||
} |
||||
for _, dnscfg := range c.DNSConfigs() { |
||||
if err := dnscfg.Validate(); err != nil { |
||||
return fmt.Errorf("invalid DNS config: %s", err) |
||||
} |
||||
tg.Labels = g.Labels |
||||
return nil |
||||
} |
||||
|
||||
// DNSConfig is the configuration for DNS based service discovery.
|
||||
type DNSConfig struct { |
||||
// DefaultedDNSConfig contains the actual fields for DNSConfig.
|
||||
DefaultedDNSConfig `yaml:",inline"` |
||||
} |
||||
|
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (c *DNSConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
c.DefaultedDNSConfig = DefaultDNSConfig |
||||
err := unmarshal(&c.DefaultedDNSConfig) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
for _, rlcfg := range c.RelabelConfigs() { |
||||
if err := rlcfg.Validate(); err != nil { |
||||
return fmt.Errorf("invalid relabelling config: %s", err) |
||||
} |
||||
if len(c.Names) == 0 { |
||||
return fmt.Errorf("DNS config must contain at least one SRV server name") |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// DNSConfigs returns the list of DNS service discovery configurations
|
||||
// for the scrape config.
|
||||
func (c *ScrapeConfig) DNSConfigs() []*DNSConfig { |
||||
var dnscfgs []*DNSConfig |
||||
for _, dc := range c.GetDnsConfig() { |
||||
dnscfgs = append(dnscfgs, &DNSConfig{*dc}) |
||||
} |
||||
return dnscfgs |
||||
// DefaultedDNSConfig is a proxy type for DNSConfig.
|
||||
type DefaultedDNSConfig struct { |
||||
Names []string `yaml:"names"` |
||||
RefreshInterval Duration `yaml:"refresh_interval"` |
||||
} |
||||
|
||||
// RelabelConfigs returns the relabel configs of the scrape config.
|
||||
func (c *ScrapeConfig) RelabelConfigs() []*RelabelConfig { |
||||
var rlcfgs []*RelabelConfig |
||||
for _, rc := range c.GetRelabelConfig() { |
||||
rlcfgs = append(rlcfgs, &RelabelConfig{*rc}) |
||||
// RelabelAction is the action to be performed on relabeling.
|
||||
type RelabelAction string |
||||
|
||||
const ( |
||||
// Performs a regex replacement.
|
||||
RelabelReplace RelabelAction = "replace" |
||||
// Drops targets for which the input does not match the regex.
|
||||
RelabelKeep = "keep" |
||||
// Drops targets for which the input does match the regex.
|
||||
RelabelDrop = "drop" |
||||
) |
||||
|
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (a *RelabelAction) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
var s string |
||||
if err := unmarshal(&s); err != nil { |
||||
return err |
||||
} |
||||
switch act := RelabelAction(strings.ToLower(s)); act { |
||||
case RelabelReplace, RelabelKeep, RelabelDrop: |
||||
*a = act |
||||
return nil |
||||
} |
||||
return rlcfgs |
||||
return fmt.Errorf("unknown relabel action %q", s) |
||||
} |
||||
|
||||
// DNSConfig encapsulates the protobuf configuration object for DNS based
|
||||
// service discovery.
|
||||
type DNSConfig struct { |
||||
pb.DNSConfig |
||||
// RelabelConfig is the configuration for relabeling of target label sets.
|
||||
type RelabelConfig struct { |
||||
// DefaultedRelabelConfig contains the actual fields for RelabelConfig.
|
||||
DefaultedRelabelConfig `yaml:",inline"` |
||||
} |
||||
|
||||
// Validate checks the DNSConfig for the validity of its fields.
|
||||
func (c *DNSConfig) Validate() error { |
||||
if _, err := utility.StringToDuration(c.GetRefreshInterval()); err != nil { |
||||
return fmt.Errorf("invalid refresh interval: %s", err) |
||||
} |
||||
return nil |
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (c *RelabelConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
c.DefaultedRelabelConfig = DefaultRelabelConfig |
||||
return unmarshal(&c.DefaultedRelabelConfig) |
||||
} |
||||
|
||||
// RefreshInterval gets the refresh interval for DNS service discovery.
|
||||
func (c *DNSConfig) RefreshInterval() time.Duration { |
||||
return stringToDuration(c.GetRefreshInterval()) |
||||
// DefaultedRelabelConfig is a proxy type for RelabelConfig.
|
||||
type DefaultedRelabelConfig struct { |
||||
// A list of labels from which values are taken and concatenated
|
||||
// with the configured separator in order.
|
||||
SourceLabels clientmodel.LabelNames `yaml:"source_labels,flow"` |
||||
// Separator is the string between concatenated values from the source labels.
|
||||
Separator string `yaml:"separator"` |
||||
// Regex against which the concatenation is matched.
|
||||
Regex *Regexp `yaml:"regex"` |
||||
// The label to which the resulting string is written in a replacement.
|
||||
TargetLabel clientmodel.LabelName `yaml:"target_label,omitempty"` |
||||
// Replacement is the regex replacement pattern to be used.
|
||||
Replacement string `yaml:"replacement,omitempty"` |
||||
// Action is the action to be performed for the relabeling.
|
||||
Action RelabelAction `yaml:"action"` |
||||
} |
||||
|
||||
// RelabelConfig encapsulates the protobuf configuration object for relabeling.
|
||||
type RelabelConfig struct { |
||||
pb.RelabelConfig |
||||
// Regexp encapsulates a regexp.Regexp and makes it YAML marshallable.
|
||||
type Regexp struct { |
||||
regexp.Regexp |
||||
} |
||||
|
||||
// Validate checks the RelabelConfig for the validity of its fields.
|
||||
func (c *RelabelConfig) Validate() error { |
||||
if len(c.GetSourceLabel()) == 0 { |
||||
return errors.New("at least one source label is required") |
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (re *Regexp) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
var s string |
||||
if err := unmarshal(&s); err != nil { |
||||
return err |
||||
} |
||||
regex, err := regexp.Compile(s) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
re.Regexp = *regex |
||||
return nil |
||||
} |
||||
|
||||
// TargetGroup is derived from a protobuf TargetGroup and attaches a source to it
|
||||
// that identifies the origin of the group.
|
||||
type TargetGroup struct { |
||||
// Source is an identifier that describes a group of targets.
|
||||
Source string |
||||
// Labels is a set of labels that is common across all targets in the group.
|
||||
Labels clientmodel.LabelSet |
||||
// Targets is a list of targets identified by a label set. Each target is
|
||||
// uniquely identifiable in the group by its address label.
|
||||
Targets []clientmodel.LabelSet |
||||
// MarshalYAML implements the yaml.Marshaller interface.
|
||||
func (re *Regexp) MarshalYAML() (interface{}, error) { |
||||
return re.String(), nil |
||||
} |
||||
|
||||
func (tg *TargetGroup) String() string { |
||||
return tg.Source |
||||
// Duration encapsulates a time.Duration and makes it YAML marshallable.
|
||||
//
|
||||
// TODO(fabxc): Since we have custom types for most things, including timestamps,
|
||||
// we might want to move this into our model as well, eventually.
|
||||
type Duration time.Duration |
||||
|
||||
// UnmarshalYAML implements the yaml.Unmarshaller interface.
|
||||
func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error { |
||||
var s string |
||||
if err := unmarshal(&s); err != nil { |
||||
return err |
||||
} |
||||
dur, err := utility.StringToDuration(s) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
*d = Duration(dur) |
||||
return nil |
||||
} |
||||
|
||||
// MarshalYAML implements the yaml.Marshaller interface.
|
||||
func (d Duration) MarshalYAML() (interface{}, error) { |
||||
return utility.DurationToString(time.Duration(d)), nil |
||||
} |
||||
|
||||
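// --- Example (not part of this commit): a minimal sketch of how the new
// YAML-based API defined above could be used. It assumes the package is
// importable as github.com/prometheus/prometheus/config and that the
// conf.good.yml fixture added further down exists; the main package, file
// path, and output format are illustrative assumptions only.
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/prometheus/prometheus/config"
)

func main() {
	// LoadFromFile reads the file and unmarshals it via Load, which applies
	// the package-level defaults (DefaultGlobalConfig, DefaultScrapeConfig, ...)
	// and rejects duplicate or invalid job names during unmarshalling.
	cfg, err := config.LoadFromFile("testdata/conf.good.yml")
	if err != nil {
		log.Fatalf("loading config: %s", err)
	}
	// Per-job scrape intervals and timeouts fall back to the global values
	// while unmarshalling, so they can be read directly here.
	for _, sc := range cfg.ScrapeConfigs {
		fmt.Printf("job %q scrapes every %s\n", sc.JobName, time.Duration(sc.ScrapeInterval))
	}
}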
@@ -1,117 +0,0 @@
||||
// Copyright 2013 The Prometheus Authors |
||||
// Licensed under the Apache License, Version 2.0 (the "License"); |
||||
// you may not use this file except in compliance with the License. |
||||
// You may obtain a copy of the License at |
||||
// |
||||
// http://www.apache.org/licenses/LICENSE-2.0 |
||||
// |
||||
// Unless required by applicable law or agreed to in writing, software |
||||
// distributed under the License is distributed on an "AS IS" BASIS, |
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
// See the License for the specific language governing permissions and |
||||
// limitations under the License. |
||||
|
||||
package io.prometheus; |
||||
|
||||
// A label/value pair suitable for attaching to timeseries. |
||||
message LabelPair { |
||||
// The name of the label. Must adhere to the regex "[a-zA-Z_][a-zA-Z0-9_]*". |
||||
optional string name = 1; |
||||
// The value of the label. May contain any characters. |
||||
optional string value = 2; |
||||
} |
||||
|
||||
// A set of label/value pairs. |
||||
message LabelPairs { |
||||
repeated LabelPair label = 1; |
||||
} |
||||
|
||||
// The global Prometheus configuration section. |
||||
message GlobalConfig { |
||||
// How frequently to scrape targets by default. Must be a valid Prometheus |
||||
// duration string in the form "[0-9]+[smhdwy]". |
||||
optional string scrape_interval = 1 [default = "1m"]; |
||||
// How frequently to evaluate rules by default. Must be a valid Prometheus |
||||
// duration string in the form "[0-9]+[smhdwy]". |
||||
optional string evaluation_interval = 2 [default = "1m"]; |
||||
// The labels to add to any timeseries that this Prometheus instance scrapes. |
||||
optional LabelPairs labels = 3; |
||||
// The list of file names of rule files to load. |
||||
repeated string rule_file = 4; |
||||
} |
||||
|
||||
// A labeled group of targets to scrape for a job. |
||||
message TargetGroup { |
||||
// The list of endpoints to scrape via HTTP. |
||||
repeated string target = 1; |
||||
// The labels to add to any timeseries scraped for this target group. |
||||
optional LabelPairs labels = 2; |
||||
} |
||||
|
||||
// The configuration for DNS based service discovery. |
||||
message DNSConfig { |
||||
// The list of DNS-SD service names pointing to SRV records |
||||
// containing endpoint information. |
||||
repeated string name = 1; |
||||
// Discovery refresh period when using DNS-SD to discover targets. Must be a |
||||
// valid Prometheus duration string in the form "[0-9]+[smhdwy]". |
||||
optional string refresh_interval = 2 [default = "30s"]; |
||||
} |
||||
|
||||
// The configuration for relabeling of target label sets. |
||||
message RelabelConfig { |
||||
// A list of labels from which values are taken and concatenated |
||||
// with the configured separator in order. |
||||
repeated string source_label = 1; |
||||
// Regex against which the concatenation is matched. |
||||
required string regex = 2; |
||||
// The label to which the resulting string is written in a replacement. |
||||
optional string target_label = 3; |
||||
// Replacement is the regex replacement pattern to be used. |
||||
optional string replacement = 4; |
||||
// Separator is the string between concatenated values from the source labels. |
||||
optional string separator = 5 [default = ";"]; |
||||
|
||||
// Action is the action to be performed for the relabeling. |
||||
enum Action { |
||||
REPLACE = 0; // Performs a regex replacement. |
||||
KEEP = 1; // Drops targets for which the input does not match the regex. |
||||
DROP = 2; // Drops targets for which the input does match the regex. |
||||
} |
||||
optional Action action = 6 [default = REPLACE]; |
||||
} |
||||
|
||||
// The configuration for a Prometheus job to scrape. |
||||
// |
||||
// The next field no. is 11. |
||||
message ScrapeConfig { |
||||
// The job name. Must adhere to the regex "[a-zA-Z_][a-zA-Z0-9_-]*". |
||||
required string job_name = 1; |
||||
// How frequently to scrape targets from this job. Overrides the global |
||||
// default. Must be a valid Prometheus duration string in the form |
||||
// "[0-9]+[smhdwy]". |
||||
optional string scrape_interval = 2; |
||||
// Per-target timeout when scraping this job. Must be a valid Prometheus |
||||
// duration string in the form "[0-9]+[smhdwy]". |
||||
optional string scrape_timeout = 7 [default = "10s"]; |
||||
// List of DNS service discovery configurations. |
||||
repeated DNSConfig dns_config = 9; |
||||
// List of labeled target groups for this job. |
||||
repeated TargetGroup target_group = 5; |
||||
// List of relabel configurations. |
||||
repeated RelabelConfig relabel_config = 10; |
||||
// The HTTP resource path on which to fetch metrics from targets. |
||||
optional string metrics_path = 6 [default = "/metrics"]; |
||||
// The URL scheme with which to fetch metrics from targets. |
||||
optional string scheme = 8 [default = "http"]; |
||||
} |
||||
|
||||
// The top-level Prometheus configuration. |
||||
message PrometheusConfig { |
||||
// Global Prometheus configuration options. If omitted, an empty global |
||||
// configuration with default values (see GlobalConfig definition) will be |
||||
// created. |
||||
optional GlobalConfig global = 1; |
||||
// The list of scrape configs. |
||||
repeated ScrapeConfig scrape_config = 3; |
||||
} |
||||
@@ -1,80 +1,152 @@
||||
// Copyright 2013 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package config |
||||
|
||||
import ( |
||||
"path" |
||||
"reflect" |
||||
"regexp" |
||||
"strings" |
||||
"testing" |
||||
"time" |
||||
|
||||
"gopkg.in/yaml.v2" |
||||
|
||||
clientmodel "github.com/prometheus/client_golang/model" |
||||
) |
||||
|
||||
var fixturesPath = "fixtures" |
||||
var expectedConf = &Config{DefaultedConfig{ |
||||
GlobalConfig: &GlobalConfig{DefaultedGlobalConfig{ |
||||
ScrapeInterval: Duration(15 * time.Second), |
||||
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, |
||||
EvaluationInterval: Duration(30 * time.Second), |
||||
|
||||
Labels: clientmodel.LabelSet{ |
||||
"monitor": "codelab", |
||||
"foo": "bar", |
||||
}, |
||||
}}, |
||||
|
||||
RuleFiles: []string{ |
||||
"first.rules", |
||||
"second.rules", |
||||
}, |
||||
|
||||
ScrapeConfigs: []*ScrapeConfig{ |
||||
{DefaultedScrapeConfig{ |
||||
JobName: "prometheus", |
||||
|
||||
ScrapeInterval: Duration(15 * time.Second), |
||||
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, |
||||
|
||||
MetricsPath: DefaultScrapeConfig.MetricsPath, |
||||
Scheme: DefaultScrapeConfig.Scheme, |
||||
|
||||
TargetGroups: []*TargetGroup{ |
||||
{ |
||||
Targets: []clientmodel.LabelSet{ |
||||
{clientmodel.AddressLabel: "localhost:9090"}, |
||||
{clientmodel.AddressLabel: "localhost:9191"}, |
||||
}, |
||||
Labels: clientmodel.LabelSet{ |
||||
"my": "label", |
||||
"your": "label", |
||||
}, |
||||
}, |
||||
}, |
||||
|
||||
RelabelConfigs: []*RelabelConfig{ |
||||
{DefaultedRelabelConfig{ |
||||
SourceLabels: clientmodel.LabelNames{"job", "__meta_dns_srv_name"}, |
||||
TargetLabel: "job", |
||||
Separator: ";", |
||||
Regex: &Regexp{*regexp.MustCompile("(.*)some-[regex]$")}, |
||||
Replacement: "foo-${1}", |
||||
Action: RelabelReplace, |
||||
}}, |
||||
}, |
||||
}}, |
||||
{DefaultedScrapeConfig{ |
||||
JobName: "service-x", |
||||
|
||||
ScrapeInterval: Duration(50 * time.Second), |
||||
ScrapeTimeout: Duration(5 * time.Second), |
||||
|
||||
MetricsPath: "/my_path", |
||||
Scheme: "http", |
||||
|
||||
DNSConfigs: []*DNSConfig{ |
||||
{DefaultedDNSConfig{ |
||||
Names: []string{ |
||||
"first.dns.address.domain.com", |
||||
"second.dns.address.domain.com", |
||||
}, |
||||
RefreshInterval: Duration(15 * time.Second), |
||||
}}, |
||||
{DefaultedDNSConfig{ |
||||
Names: []string{ |
||||
"first.dns.address.domain.com", |
||||
}, |
||||
RefreshInterval: Duration(30 * time.Second), |
||||
}}, |
||||
}, |
||||
|
||||
RelabelConfigs: []*RelabelConfig{ |
||||
{DefaultedRelabelConfig{ |
||||
SourceLabels: clientmodel.LabelNames{"job"}, |
||||
Regex: &Regexp{*regexp.MustCompile("(.*)some-[regex]$")}, |
||||
Separator: ";", |
||||
Action: RelabelDrop, |
||||
}}, |
||||
}, |
||||
}}, |
||||
}, |
||||
}, ""} |
||||
|
||||
func TestLoadConfig(t *testing.T) { |
||||
c, err := LoadFromFile("testdata/conf.good.yml") |
||||
if err != nil { |
||||
t.Errorf("Error parsing %s: %s", "testdata/conf.good.yml", err) |
||||
} |
||||
bgot, err := yaml.Marshal(c) |
||||
if err != nil { |
||||
t.Errorf("%s", err) |
||||
} |
||||
bexp, err := yaml.Marshal(expectedConf) |
||||
if err != nil { |
||||
t.Errorf("%s", err) |
||||
} |
||||
expectedConf.original = c.original |
||||
|
||||
var configTests = []struct { |
||||
inputFile string |
||||
shouldFail bool |
||||
errContains string |
||||
if !reflect.DeepEqual(c, expectedConf) { |
||||
t.Errorf("%s: unexpected config result: \n\n%s\n expected\n\n%s", "testdata/conf.good.yml", bgot, bexp) |
||||
} |
||||
} |
||||
|
||||
var expectedErrors = []struct { |
||||
filename string |
||||
errMsg string |
||||
}{ |
||||
{ |
||||
inputFile: "minimal.conf.input", |
||||
}, { |
||||
inputFile: "sample.conf.input", |
||||
filename: "jobname.bad.yml", |
||||
errMsg: `"prom^etheus" is not a valid job name`, |
||||
}, { |
||||
inputFile: "empty.conf.input", |
||||
filename: "jobname_dup.bad.yml", |
||||
errMsg: `found multiple scrape configs with job name "prometheus"`, |
||||
}, { |
||||
inputFile: "sd_targets.conf.input", |
||||
filename: "labelname.bad.yml", |
||||
errMsg: `"not$allowed" is not a valid label name`, |
||||
}, { |
||||
inputFile: "full.conf.input", |
||||
}, { |
||||
inputFile: "invalid_proto_format.conf.input", |
||||
shouldFail: true, |
||||
errContains: "unknown field name", |
||||
}, |
||||
{ |
||||
inputFile: "invalid_scrape_interval.conf.input", |
||||
shouldFail: true, |
||||
errContains: "invalid global scrape interval", |
||||
}, |
||||
{ |
||||
inputFile: "invalid_job_name.conf.input", |
||||
shouldFail: true, |
||||
errContains: "invalid job name", |
||||
}, |
||||
{ |
||||
inputFile: "invalid_label_name.conf.input", |
||||
shouldFail: true, |
||||
errContains: "invalid label name", |
||||
}, |
||||
{ |
||||
inputFile: "repeated_job_name.conf.input", |
||||
shouldFail: true, |
||||
errContains: "found multiple scrape configs configured with the same job name: \"testjob1\"", |
||||
filename: "regex.bad.yml", |
||||
errMsg: "error parsing regexp", |
||||
}, |
||||
} |
||||
|
||||
func TestConfigs(t *testing.T) { |
||||
for i, configTest := range configTests { |
||||
_, err := LoadFromFile(path.Join(fixturesPath, configTest.inputFile)) |
||||
|
||||
if err != nil { |
||||
if !configTest.shouldFail { |
||||
t.Fatalf("%d. Error parsing config %v: %v", i, configTest.inputFile, err) |
||||
} else { |
||||
if !strings.Contains(err.Error(), configTest.errContains) { |
||||
t.Fatalf("%d. Expected error containing '%v', got: %v", i, configTest.errContains, err) |
||||
} |
||||
} |
||||
func TestBadConfigs(t *testing.T) { |
||||
for _, ee := range expectedErrors { |
||||
_, err := LoadFromFile("testdata/" + ee.filename) |
||||
if err == nil { |
||||
t.Errorf("Expected error parsing %s but got none", ee.filename) |
||||
} else if !strings.Contains(err.Error(), ee.errMsg) { |
||||
t.Errorf("Expected error for %s to contain %q but got: %s", ee.filename, ee.errMsg, err) |
||||
} |
||||
} |
||||
} |
||||
|
||||
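// --- Example (not part of this commit): a small sketch of how the custom
// Duration and Regexp YAML types defined in the config package above behave
// when round-tripped through gopkg.in/yaml.v2. The main package and import
// path are assumptions for illustration only.
package main

import (
	"fmt"
	"log"
	"time"

	"gopkg.in/yaml.v2"

	"github.com/prometheus/prometheus/config"
)

func main() {
	// Durations are parsed with the Prometheus duration syntax ("30s", "1m",
	// "2h", ...) via utility.StringToDuration, not Go's time.ParseDuration.
	var d config.Duration
	if err := yaml.Unmarshal([]byte("30s"), &d); err != nil {
		log.Fatal(err)
	}
	fmt.Println(time.Duration(d)) // 30s

	// Regexps are compiled at unmarshal time, so an invalid pattern such as
	// "abc(def" (see regex.bad.yml below) fails immediately with a parse error.
	var re config.Regexp
	if err := yaml.Unmarshal([]byte("^foo.*$"), &re); err != nil {
		log.Fatal(err)
	}
	fmt.Println(re.MatchString("foobar")) // true
}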
@@ -1,89 +0,0 @@
||||
global < |
||||
scrape_interval: "30s" |
||||
evaluation_interval: "30s" |
||||
labels: < |
||||
label: < |
||||
name: "monitor" |
||||
value: "test" |
||||
> |
||||
label: < |
||||
name: "more" |
||||
value: "test" |
||||
> |
||||
> |
||||
rule_file: "prometheus.rules" |
||||
rule_file: "prometheus.more.rules" |
||||
> |
||||
|
||||
scrape_config: < |
||||
job_name: "prometheus" |
||||
scrape_interval: "15s" |
||||
metrics_path: "/metrics" |
||||
scheme: "http" |
||||
|
||||
target_group: < |
||||
target: "localhost:9090" |
||||
> |
||||
> |
||||
|
||||
scrape_config: < |
||||
job_name: "myjob" |
||||
scrape_interval: "15s" |
||||
metrics_path: "/metrics" |
||||
scheme: "http" |
||||
|
||||
dns_config: < |
||||
name: "first.srv.name" |
||||
name: "second.srv.name" |
||||
refresh_interval: "1h" |
||||
> |
||||
|
||||
dns_config: < |
||||
name: "first2.srv.name" |
||||
name: "second2.srv.name" |
||||
refresh_interval: "1m" |
||||
> |
||||
|
||||
relabel_config: < |
||||
source_label: "l1" |
||||
source_label: "l2" |
||||
regex: "^foobar.*$" |
||||
target_label: "l3" |
||||
replacement: "static" |
||||
> |
||||
relabel_config: < |
||||
source_label: "l4" |
||||
regex: "^foobar.*$" |
||||
action: DROP |
||||
> |
||||
relabel_config: < |
||||
source_label: "l4" |
||||
regex: "^foobar.*$" |
||||
action: KEEP |
||||
> |
||||
|
||||
target_group: < |
||||
target: "localhost:9090" |
||||
target: "localhost:9091" |
||||
labels: < |
||||
label: < |
||||
name: "tg1" |
||||
value: "tg1" |
||||
> |
||||
> |
||||
> |
||||
target_group: < |
||||
target: "my.domain:9090" |
||||
target: "my.domain:9091" |
||||
labels: < |
||||
label: < |
||||
name: "tg2" |
||||
value: "tg2" |
||||
> |
||||
label: < |
||||
name: "tg2_1" |
||||
value: "tg2_1" |
||||
> |
||||
> |
||||
> |
||||
> |
||||
@@ -1,3 +0,0 @@
||||
scrape_config: < |
||||
job_name: "1testjob" |
||||
> |
||||
@@ -1,10 +0,0 @@
||||
global < |
||||
scrape_interval: "30s" |
||||
evaluation_interval: "30s" |
||||
labels: < |
||||
label: < |
||||
name: "monitor-test" |
||||
value: "test" |
||||
> |
||||
> |
||||
> |
||||
@@ -1,11 +0,0 @@
||||
global < |
||||
scrape_interval: "30s" |
||||
evaluation_interval: "30s" |
||||
unknown_field: "foo" |
||||
labels: < |
||||
label: < |
||||
name: "monitor" |
||||
value: "test" |
||||
> |
||||
> |
||||
> |
||||
@@ -1,10 +0,0 @@
||||
global < |
||||
scrape_interval: "30" |
||||
evaluation_interval: "30s" |
||||
labels: < |
||||
label: < |
||||
name: "monitor" |
||||
value: "test" |
||||
> |
||||
> |
||||
> |
||||
@@ -1,22 +0,0 @@
||||
global < |
||||
scrape_interval: "30s" |
||||
evaluation_interval: "30s" |
||||
labels: < |
||||
label: < |
||||
name: "monitor" |
||||
value: "test" |
||||
> |
||||
> |
||||
rule_file: "prometheus.rules" |
||||
> |
||||
|
||||
scrape_config: < |
||||
job_name: "prometheus" |
||||
scrape_interval: "15s" |
||||
metrics_path: "/metrics" |
||||
scheme: "http" |
||||
|
||||
target_group: < |
||||
target: "localhost:9090" |
||||
> |
||||
> |
||||
@@ -1,11 +0,0 @@
||||
scrape_config: < |
||||
job_name: "testjob1" |
||||
> |
||||
|
||||
scrape_config: < |
||||
job_name: "testjob2" |
||||
> |
||||
|
||||
scrape_config: < |
||||
job_name: "testjob1" |
||||
> |
||||
@@ -1,57 +0,0 @@
||||
global < |
||||
scrape_interval: "30s" |
||||
evaluation_interval: "30s" |
||||
labels: < |
||||
label: < |
||||
name: "monitor" |
||||
value: "test" |
||||
> |
||||
> |
||||
rule_file: "prometheus.rules" |
||||
> |
||||
|
||||
scrape_config: < |
||||
job_name: "prometheus" |
||||
scrape_interval: "15s" |
||||
|
||||
target_group: < |
||||
target: "localhost:9090" |
||||
labels: < |
||||
label: < |
||||
name: "group" |
||||
value: "canary" |
||||
> |
||||
> |
||||
> |
||||
> |
||||
|
||||
scrape_config: < |
||||
job_name: "random" |
||||
scrape_interval: "30s" |
||||
|
||||
target_group: < |
||||
target: "random.com:8080" |
||||
target: "random.com:8081" |
||||
target: "random.com:8082" |
||||
target: "random.com:8083" |
||||
target: "random.com:8084" |
||||
|
||||
labels: < |
||||
label: < |
||||
name: "group" |
||||
value: "production" |
||||
> |
||||
> |
||||
> |
||||
target_group: < |
||||
target: "random.com:8085" |
||||
target: "random.com:8086" |
||||
|
||||
labels: < |
||||
label: < |
||||
name: "group" |
||||
value: "canary" |
||||
> |
||||
> |
||||
> |
||||
> |
||||
@@ -1,8 +0,0 @@
||||
scrape_config: < |
||||
job_name: "testjob" |
||||
dns_config: < |
||||
name: "sd_name" |
||||
name: "sd_name2" |
||||
refresh_interval: "15s" |
||||
> |
||||
> |
||||
@@ -1,406 +0,0 @@
||||
// Code generated by protoc-gen-go.
|
||||
// source: config.proto
|
||||
// DO NOT EDIT!
|
||||
|
||||
/* |
||||
Package io_prometheus is a generated protocol buffer package. |
||||
|
||||
It is generated from these files: |
||||
config.proto |
||||
|
||||
It has these top-level messages: |
||||
LabelPair |
||||
LabelPairs |
||||
GlobalConfig |
||||
TargetGroup |
||||
DNSConfig |
||||
RelabelConfig |
||||
ScrapeConfig |
||||
PrometheusConfig |
||||
*/ |
||||
package io_prometheus |
||||
|
||||
import proto "github.com/golang/protobuf/proto" |
||||
import math "math" |
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal |
||||
var _ = math.Inf |
||||
|
||||
// Action is the action to be performed for the relabeling.
|
||||
type RelabelConfig_Action int32 |
||||
|
||||
const ( |
||||
RelabelConfig_REPLACE RelabelConfig_Action = 0 |
||||
RelabelConfig_KEEP RelabelConfig_Action = 1 |
||||
RelabelConfig_DROP RelabelConfig_Action = 2 |
||||
) |
||||
|
||||
var RelabelConfig_Action_name = map[int32]string{ |
||||
0: "REPLACE", |
||||
1: "KEEP", |
||||
2: "DROP", |
||||
} |
||||
var RelabelConfig_Action_value = map[string]int32{ |
||||
"REPLACE": 0, |
||||
"KEEP": 1, |
||||
"DROP": 2, |
||||
} |
||||
|
||||
func (x RelabelConfig_Action) Enum() *RelabelConfig_Action { |
||||
p := new(RelabelConfig_Action) |
||||
*p = x |
||||
return p |
||||
} |
||||
func (x RelabelConfig_Action) String() string { |
||||
return proto.EnumName(RelabelConfig_Action_name, int32(x)) |
||||
} |
||||
func (x *RelabelConfig_Action) UnmarshalJSON(data []byte) error { |
||||
value, err := proto.UnmarshalJSONEnum(RelabelConfig_Action_value, data, "RelabelConfig_Action") |
||||
if err != nil { |
||||
return err |
||||
} |
||||
*x = RelabelConfig_Action(value) |
||||
return nil |
||||
} |
||||
|
||||
// A label/value pair suitable for attaching to timeseries.
|
||||
type LabelPair struct { |
||||
// The name of the label. Must adhere to the regex "[a-zA-Z_][a-zA-Z0-9_]*".
|
||||
Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` |
||||
// The value of the label. May contain any characters.
|
||||
Value *string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` |
||||
XXX_unrecognized []byte `json:"-"` |
||||
} |
||||
|
||||
func (m *LabelPair) Reset() { *m = LabelPair{} } |
||||
func (m *LabelPair) String() string { return proto.CompactTextString(m) } |
||||
func (*LabelPair) ProtoMessage() {} |
||||
|
||||
func (m *LabelPair) GetName() string { |
||||
if m != nil && m.Name != nil { |
||||
return *m.Name |
||||
} |
||||
return "" |
||||
} |
||||
|
||||
func (m *LabelPair) GetValue() string { |
||||
if m != nil && m.Value != nil { |
||||
return *m.Value |
||||
} |
||||
return "" |
||||
} |
||||
|
||||
// A set of label/value pairs.
|
||||
type LabelPairs struct { |
||||
Label []*LabelPair `protobuf:"bytes,1,rep,name=label" json:"label,omitempty"` |
||||
XXX_unrecognized []byte `json:"-"` |
||||
} |
||||
|
||||
func (m *LabelPairs) Reset() { *m = LabelPairs{} } |
||||
func (m *LabelPairs) String() string { return proto.CompactTextString(m) } |
||||
func (*LabelPairs) ProtoMessage() {} |
||||
|
||||
func (m *LabelPairs) GetLabel() []*LabelPair { |
||||
if m != nil { |
||||
return m.Label |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// The global Prometheus configuration section.
|
||||
type GlobalConfig struct { |
||||
// How frequently to scrape targets by default. Must be a valid Prometheus
|
||||
// duration string in the form "[0-9]+[smhdwy]".
|
||||
ScrapeInterval *string `protobuf:"bytes,1,opt,name=scrape_interval,def=1m" json:"scrape_interval,omitempty"` |
||||
// How frequently to evaluate rules by default. Must be a valid Prometheus
|
||||
// duration string in the form "[0-9]+[smhdwy]".
|
||||
EvaluationInterval *string `protobuf:"bytes,2,opt,name=evaluation_interval,def=1m" json:"evaluation_interval,omitempty"` |
||||
// The labels to add to any timeseries that this Prometheus instance scrapes.
|
||||
Labels *LabelPairs `protobuf:"bytes,3,opt,name=labels" json:"labels,omitempty"` |
||||
// The list of file names of rule files to load.
|
||||
RuleFile []string `protobuf:"bytes,4,rep,name=rule_file" json:"rule_file,omitempty"` |
||||
XXX_unrecognized []byte `json:"-"` |
||||
} |
||||
|
||||
func (m *GlobalConfig) Reset() { *m = GlobalConfig{} } |
||||
func (m *GlobalConfig) String() string { return proto.CompactTextString(m) } |
||||
func (*GlobalConfig) ProtoMessage() {} |
||||
|
||||
const Default_GlobalConfig_ScrapeInterval string = "1m" |
||||
const Default_GlobalConfig_EvaluationInterval string = "1m" |
||||
|
||||
func (m *GlobalConfig) GetScrapeInterval() string { |
||||
if m != nil && m.ScrapeInterval != nil { |
||||
return *m.ScrapeInterval |
||||
} |
||||
return Default_GlobalConfig_ScrapeInterval |
||||
} |
||||
|
||||
func (m *GlobalConfig) GetEvaluationInterval() string { |
||||
if m != nil && m.EvaluationInterval != nil { |
||||
return *m.EvaluationInterval |
||||
} |
||||
return Default_GlobalConfig_EvaluationInterval |
||||
} |
||||
|
||||
func (m *GlobalConfig) GetLabels() *LabelPairs { |
||||
if m != nil { |
||||
return m.Labels |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (m *GlobalConfig) GetRuleFile() []string { |
||||
if m != nil { |
||||
return m.RuleFile |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// A labeled group of targets to scrape for a job.
|
||||
type TargetGroup struct { |
||||
// The list of endpoints to scrape via HTTP.
|
||||
Target []string `protobuf:"bytes,1,rep,name=target" json:"target,omitempty"` |
||||
// The labels to add to any timeseries scraped for this target group.
|
||||
Labels *LabelPairs `protobuf:"bytes,2,opt,name=labels" json:"labels,omitempty"` |
||||
XXX_unrecognized []byte `json:"-"` |
||||
} |
||||
|
||||
func (m *TargetGroup) Reset() { *m = TargetGroup{} } |
||||
func (m *TargetGroup) String() string { return proto.CompactTextString(m) } |
||||
func (*TargetGroup) ProtoMessage() {} |
||||
|
||||
func (m *TargetGroup) GetTarget() []string { |
||||
if m != nil { |
||||
return m.Target |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (m *TargetGroup) GetLabels() *LabelPairs { |
||||
if m != nil { |
||||
return m.Labels |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// The configuration for DNS based service discovery.
|
||||
type DNSConfig struct { |
||||
// The list of DNS-SD service names pointing to SRV records
|
||||
// containing endpoint information.
|
||||
Name []string `protobuf:"bytes,1,rep,name=name" json:"name,omitempty"` |
||||
// Discovery refresh period when using DNS-SD to discover targets. Must be a
|
||||
// valid Prometheus duration string in the form "[0-9]+[smhdwy]".
|
||||
RefreshInterval *string `protobuf:"bytes,2,opt,name=refresh_interval,def=30s" json:"refresh_interval,omitempty"` |
||||
XXX_unrecognized []byte `json:"-"` |
||||
} |
||||
|
||||
func (m *DNSConfig) Reset() { *m = DNSConfig{} } |
||||
func (m *DNSConfig) String() string { return proto.CompactTextString(m) } |
||||
func (*DNSConfig) ProtoMessage() {} |
||||
|
||||
const Default_DNSConfig_RefreshInterval string = "30s" |
||||
|
||||
func (m *DNSConfig) GetName() []string { |
||||
if m != nil { |
||||
return m.Name |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (m *DNSConfig) GetRefreshInterval() string { |
||||
if m != nil && m.RefreshInterval != nil { |
||||
return *m.RefreshInterval |
||||
} |
||||
return Default_DNSConfig_RefreshInterval |
||||
} |
||||
|
||||
// The configuration for relabeling of target label sets.
|
||||
type RelabelConfig struct { |
||||
// A list of labels from which values are taken and concatenated
|
||||
// with the configured separator in order.
|
||||
SourceLabel []string `protobuf:"bytes,1,rep,name=source_label" json:"source_label,omitempty"` |
||||
// Regex against which the concatenation is matched.
|
||||
Regex *string `protobuf:"bytes,2,req,name=regex" json:"regex,omitempty"` |
||||
// The label to which the resulting string is written in a replacement.
|
||||
TargetLabel *string `protobuf:"bytes,3,opt,name=target_label" json:"target_label,omitempty"` |
||||
// Replacement is the regex replacement pattern to be used.
|
||||
Replacement *string `protobuf:"bytes,4,opt,name=replacement" json:"replacement,omitempty"` |
||||
// Separator is the string between concatenated values from the source labels.
|
||||
Separator *string `protobuf:"bytes,5,opt,name=separator,def=;" json:"separator,omitempty"` |
||||
Action *RelabelConfig_Action `protobuf:"varint,6,opt,name=action,enum=io.prometheus.RelabelConfig_Action,def=0" json:"action,omitempty"` |
||||
XXX_unrecognized []byte `json:"-"` |
||||
} |
||||
|
||||
func (m *RelabelConfig) Reset() { *m = RelabelConfig{} } |
||||
func (m *RelabelConfig) String() string { return proto.CompactTextString(m) } |
||||
func (*RelabelConfig) ProtoMessage() {} |
||||
|
||||
const Default_RelabelConfig_Separator string = ";" |
||||
const Default_RelabelConfig_Action RelabelConfig_Action = RelabelConfig_REPLACE |
||||
|
||||
func (m *RelabelConfig) GetSourceLabel() []string { |
||||
if m != nil { |
||||
return m.SourceLabel |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (m *RelabelConfig) GetRegex() string { |
||||
if m != nil && m.Regex != nil { |
||||
return *m.Regex |
||||
} |
||||
return "" |
||||
} |
||||
|
||||
func (m *RelabelConfig) GetTargetLabel() string { |
||||
if m != nil && m.TargetLabel != nil { |
||||
return *m.TargetLabel |
||||
} |
||||
return "" |
||||
} |
||||
|
||||
func (m *RelabelConfig) GetReplacement() string { |
||||
if m != nil && m.Replacement != nil { |
||||
return *m.Replacement |
||||
} |
||||
return "" |
||||
} |
||||
|
||||
func (m *RelabelConfig) GetSeparator() string { |
||||
if m != nil && m.Separator != nil { |
||||
return *m.Separator |
||||
} |
||||
return Default_RelabelConfig_Separator |
||||
} |
||||
|
||||
func (m *RelabelConfig) GetAction() RelabelConfig_Action { |
||||
if m != nil && m.Action != nil { |
||||
return *m.Action |
||||
} |
||||
return Default_RelabelConfig_Action |
||||
} |
||||
|
||||
// The configuration for a Prometheus job to scrape.
|
||||
//
|
||||
// The next field no. is 11.
|
||||
type ScrapeConfig struct { |
||||
// The job name. Must adhere to the regex "[a-zA-Z_][a-zA-Z0-9_-]*".
|
||||
JobName *string `protobuf:"bytes,1,req,name=job_name" json:"job_name,omitempty"` |
||||
// How frequently to scrape targets from this job. Overrides the global
|
||||
// default. Must be a valid Prometheus duration string in the form
|
||||
// "[0-9]+[smhdwy]".
|
||||
ScrapeInterval *string `protobuf:"bytes,2,opt,name=scrape_interval" json:"scrape_interval,omitempty"` |
||||
// Per-target timeout when scraping this job. Must be a valid Prometheus
|
||||
// duration string in the form "[0-9]+[smhdwy]".
|
||||
ScrapeTimeout *string `protobuf:"bytes,7,opt,name=scrape_timeout,def=10s" json:"scrape_timeout,omitempty"` |
||||
// List of DNS service discovery configurations.
|
||||
DnsConfig []*DNSConfig `protobuf:"bytes,9,rep,name=dns_config" json:"dns_config,omitempty"` |
||||
// List of labeled target groups for this job.
|
||||
TargetGroup []*TargetGroup `protobuf:"bytes,5,rep,name=target_group" json:"target_group,omitempty"` |
||||
// List of relabel configurations.
|
||||
RelabelConfig []*RelabelConfig `protobuf:"bytes,10,rep,name=relabel_config" json:"relabel_config,omitempty"` |
||||
// The HTTP resource path on which to fetch metrics from targets.
|
||||
MetricsPath *string `protobuf:"bytes,6,opt,name=metrics_path,def=/metrics" json:"metrics_path,omitempty"` |
||||
// The URL scheme with which to fetch metrics from targets.
|
||||
Scheme *string `protobuf:"bytes,8,opt,name=scheme,def=http" json:"scheme,omitempty"` |
||||
XXX_unrecognized []byte `json:"-"` |
||||
} |
||||
|
||||
func (m *ScrapeConfig) Reset() { *m = ScrapeConfig{} } |
||||
func (m *ScrapeConfig) String() string { return proto.CompactTextString(m) } |
||||
func (*ScrapeConfig) ProtoMessage() {} |
||||
|
||||
const Default_ScrapeConfig_ScrapeTimeout string = "10s" |
||||
const Default_ScrapeConfig_MetricsPath string = "/metrics" |
||||
const Default_ScrapeConfig_Scheme string = "http" |
||||
|
||||
func (m *ScrapeConfig) GetJobName() string { |
||||
if m != nil && m.JobName != nil { |
||||
return *m.JobName |
||||
} |
||||
return "" |
||||
} |
||||
|
||||
func (m *ScrapeConfig) GetScrapeInterval() string { |
||||
if m != nil && m.ScrapeInterval != nil { |
||||
return *m.ScrapeInterval |
||||
} |
||||
return "" |
||||
} |
||||
|
||||
func (m *ScrapeConfig) GetScrapeTimeout() string { |
||||
if m != nil && m.ScrapeTimeout != nil { |
||||
return *m.ScrapeTimeout |
||||
} |
||||
return Default_ScrapeConfig_ScrapeTimeout |
||||
} |
||||
|
||||
func (m *ScrapeConfig) GetDnsConfig() []*DNSConfig { |
||||
if m != nil { |
||||
return m.DnsConfig |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (m *ScrapeConfig) GetTargetGroup() []*TargetGroup { |
||||
if m != nil { |
||||
return m.TargetGroup |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (m *ScrapeConfig) GetRelabelConfig() []*RelabelConfig { |
||||
if m != nil { |
||||
return m.RelabelConfig |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (m *ScrapeConfig) GetMetricsPath() string { |
||||
if m != nil && m.MetricsPath != nil { |
||||
return *m.MetricsPath |
||||
} |
||||
return Default_ScrapeConfig_MetricsPath |
||||
} |
||||
|
||||
func (m *ScrapeConfig) GetScheme() string { |
||||
if m != nil && m.Scheme != nil { |
||||
return *m.Scheme |
||||
} |
||||
return Default_ScrapeConfig_Scheme |
||||
} |
||||
|
||||
// The top-level Prometheus configuration.
|
||||
type PrometheusConfig struct { |
||||
// Global Prometheus configuration options. If omitted, an empty global
|
||||
// configuration with default values (see GlobalConfig definition) will be
|
||||
// created.
|
||||
Global *GlobalConfig `protobuf:"bytes,1,opt,name=global" json:"global,omitempty"` |
||||
// The list of scrape configs.
|
||||
ScrapeConfig []*ScrapeConfig `protobuf:"bytes,3,rep,name=scrape_config" json:"scrape_config,omitempty"` |
||||
XXX_unrecognized []byte `json:"-"` |
||||
} |
||||
|
||||
func (m *PrometheusConfig) Reset() { *m = PrometheusConfig{} } |
||||
func (m *PrometheusConfig) String() string { return proto.CompactTextString(m) } |
||||
func (*PrometheusConfig) ProtoMessage() {} |
||||
|
||||
func (m *PrometheusConfig) GetGlobal() *GlobalConfig { |
||||
if m != nil { |
||||
return m.Global |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (m *PrometheusConfig) GetScrapeConfig() []*ScrapeConfig { |
||||
if m != nil { |
||||
return m.ScrapeConfig |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func init() { |
||||
proto.RegisterEnum("io.prometheus.RelabelConfig_Action", RelabelConfig_Action_name, RelabelConfig_Action_value) |
||||
} |
||||
@@ -1,53 +0,0 @@
||||
// Copyright 2013 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package config |
||||
|
||||
import ( |
||||
"io/ioutil" |
||||
|
||||
"github.com/golang/protobuf/proto" |
||||
|
||||
pb "github.com/prometheus/prometheus/config/generated" |
||||
) |
||||
|
||||
// LoadFromString returns a config parsed from the provided string.
|
||||
func LoadFromString(configStr string) (*Config, error) { |
||||
configProto := pb.PrometheusConfig{} |
||||
if err := proto.UnmarshalText(configStr, &configProto); err != nil { |
||||
return nil, err |
||||
} |
||||
if configProto.Global == nil { |
||||
configProto.Global = &pb.GlobalConfig{} |
||||
} |
||||
for _, scfg := range configProto.GetScrapeConfig() { |
||||
if scfg.ScrapeInterval == nil { |
||||
scfg.ScrapeInterval = proto.String(configProto.Global.GetScrapeInterval()) |
||||
} |
||||
} |
||||
|
||||
config := &Config{configProto} |
||||
err := config.Validate() |
||||
|
||||
return config, err |
||||
} |
||||
|
||||
// LoadFromFile returns a config parsed from the file of the provided name.
|
||||
func LoadFromFile(fileName string) (*Config, error) { |
||||
configStr, err := ioutil.ReadFile(fileName) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
return LoadFromString(string(configStr)) |
||||
} |
||||
@@ -1,25 +0,0 @@
||||
// Copyright 2013 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package config |
||||
|
||||
import ( |
||||
"testing" |
||||
) |
||||
|
||||
func TestLoadFromFile(t *testing.T) { |
||||
_, err := LoadFromFile("file-does-not-exist.conf") |
||||
if err == nil { |
||||
t.Error("Error expected on non-existing config file path but got none") |
||||
} |
||||
} |
||||
@@ -0,0 +1,61 @@
# my global config
global_config:
  scrape_interval: 15s
  evaluation_interval: 30s
  # scrape_timeout is set to the global default (10s).

  labels:
    monitor: codelab
    foo: bar

rule_files:
  - "first.rules"
  - "second.rules"

scrape_configs:
  - job_name: prometheus

    # scrape_interval is defined by the configured global (15s).
    # scrape_timeout is defined by the global default (10s).

    # metrics_path defaults to '/metrics'
    # scheme defaults to 'http'.

    labels:
      foo: baz

    target_groups:
      - targets: ['localhost:9090', 'localhost:9191']
        labels:
          my: label
          your: label

    relabel_configs:
      - source_labels: [job, __meta_dns_srv_name]
        regex: (.*)some-[regex]$
        target_label: job
        replacement: foo-${1}
        # action defaults to 'replace'


  - job_name: service-x

    scrape_interval: 50s
    scrape_timeout: 5s

    metrics_path: /my_path
    # scheme defaults to 'http'.

    dns_configs:
      - refresh_interval: 15s
        names:
          - first.dns.address.domain.com
          - second.dns.address.domain.com
      - names:
          - first.dns.address.domain.com
        # refresh_interval defaults to 30s.

    relabel_configs:
      - source_labels: [job]
        regex: (.*)some-[regex]$
        action: drop
@@ -0,0 +1,2 @@
scrape_configs:
  - job_name: prom^etheus
@@ -0,0 +1,5 @@
# Two scrape configs with the same job names are not allowed.
scrape_configs:
  - job_name: prometheus
  - job_name: service-x
  - job_name: prometheus
@@ -0,0 +1,3 @@
global_config:
  labels:
    not$allowed: value
@@ -0,0 +1,4 @@
scrape_configs:
  - job_name: prometheus
    relabel_configs:
      - regex: abc(def