Merge remote-tracking branch 'upstream/main' into merge-release-2.30 and upgrade prometheus/common to v0.31.1
Signed-off-by: Ganesh Vernekar <ganeshvern@gmail.com>
commit f69e4590fb
@@ -0,0 +1,29 @@
name: golangci-lint
on:
  push:
    paths:
      - "go.sum"
      - "go.mod"
      - "**.go"
      - "scripts/errcheck_excludes.txt"
      - ".github/workflows/golangci-lint.yml"
  pull_request:
    paths:
      - "go.sum"
      - "go.mod"
      - "**.go"
      - "scripts/errcheck_excludes.txt"
      - ".github/workflows/golangci-lint.yml"

jobs:
  golangci:
    name: lint
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Lint
        uses: golangci/golangci-lint-action@v2
        with:
          version: v1.42.0
@@ -0,0 +1,7 @@
FROM gitpod/workspace-full

ENV CUSTOM_NODE_VERSION=16

RUN bash -c ". .nvm/nvm.sh && nvm install ${CUSTOM_NODE_VERSION} && nvm use ${CUSTOM_NODE_VERSION} && nvm alias default ${CUSTOM_NODE_VERSION}"

RUN echo "nvm use default &>/dev/null" >> ~/.bashrc.d/51-nvm-fix
@@ -0,0 +1,4 @@
scrape_configs:
  - puppetdb_sd_configs:
      - url: http://
        query: 'resources { type = "Package" and title = "httpd" }'
@@ -0,0 +1,3 @@
scrape_configs:
  - puppetdb_sd_configs:
      - url: http://puppetserver/
@@ -0,0 +1,4 @@
scrape_configs:
  - puppetdb_sd_configs:
      - url: ftp://puppet
        query: 'resources { type = "Package" and title = "httpd" }'
@@ -0,0 +1,3 @@
scrape_configs:
  - puppetdb_sd_configs:
      - query: 'resources { type = "Package" and title = "httpd" }'
@@ -0,0 +1,49 @@
[
  {
    "certname": "edinburgh.example.com",
    "environment": "prod",
    "exported": false,
    "file": "/etc/puppetlabs/code/environments/prod/modules/upstream/apache/manifests/init.pp",
    "line": 384,
    "parameters": {
      "access_log": true,
      "access_log_file": "ssl_access_log",
      "additional_includes": [ ],
      "directoryindex": "",
      "docroot": "/var/www/html",
      "ensure": "absent",
      "options": [
        "Indexes",
        "FollowSymLinks",
        "MultiViews"
      ],
      "php_flags": { },
      "labels": {
        "alias": "edinburgh"
      },
      "scriptaliases": [
        {
          "alias": "/cgi-bin",
          "path": "/var/www/cgi-bin"
        }
      ]
    },
    "resource": "49af83866dc5a1518968b68e58a25319107afe11",
    "tags": [
      "roles::hypervisor",
      "apache",
      "apache::vhost",
      "class",
      "default-ssl",
      "profile_hypervisor",
      "vhost",
      "profile_apache",
      "hypervisor",
      "__node_regexp__edinburgh",
      "roles",
      "node"
    ],
    "title": "default-ssl",
    "type": "Apache::Vhost"
  }
]
@@ -0,0 +1,252 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package puppetdb

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net"
	"net/http"
	"net/url"
	"path"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/go-kit/log"
	"github.com/pkg/errors"
	"github.com/prometheus/common/config"
	"github.com/prometheus/common/model"
	"github.com/prometheus/common/version"

	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/refresh"
	"github.com/prometheus/prometheus/discovery/targetgroup"
)

const (
	pdbLabel            = model.MetaLabelPrefix + "puppetdb_"
	pdbLabelCertname    = pdbLabel + "certname"
	pdbLabelResource    = pdbLabel + "resource"
	pdbLabelType        = pdbLabel + "type"
	pdbLabelTitle       = pdbLabel + "title"
	pdbLabelExported    = pdbLabel + "exported"
	pdbLabelTags        = pdbLabel + "tags"
	pdbLabelFile        = pdbLabel + "file"
	pdbLabelEnvironment = pdbLabel + "environment"
	pdbLabelParameter   = pdbLabel + "parameter_"
	separator           = ","
)

var (
	// DefaultSDConfig is the default PuppetDB SD configuration.
	DefaultSDConfig = SDConfig{
		RefreshInterval:  model.Duration(60 * time.Second),
		Port:             80,
		HTTPClientConfig: config.DefaultHTTPClientConfig,
	}
	matchContentType = regexp.MustCompile(`^(?i:application\/json(;\s*charset=("utf-8"|utf-8))?)$`)
	userAgent        = fmt.Sprintf("Prometheus/%s", version.Version)
)

func init() {
	discovery.RegisterConfig(&SDConfig{})
}

// SDConfig is the configuration for PuppetDB based discovery.
type SDConfig struct {
	HTTPClientConfig  config.HTTPClientConfig `yaml:",inline"`
	RefreshInterval   model.Duration          `yaml:"refresh_interval,omitempty"`
	URL               string                  `yaml:"url"`
	Query             string                  `yaml:"query"`
	IncludeParameters bool                    `yaml:"include_parameters"`
	Port              int                     `yaml:"port"`
}

// Name returns the name of the Config.
func (*SDConfig) Name() string { return "puppetdb" }

// NewDiscoverer returns a Discoverer for the Config.
func (c *SDConfig) NewDiscoverer(opts discovery.DiscovererOptions) (discovery.Discoverer, error) {
	return NewDiscovery(c, opts.Logger)
}

// SetDirectory joins any relative file paths with dir.
func (c *SDConfig) SetDirectory(dir string) {
	c.HTTPClientConfig.SetDirectory(dir)
}

// UnmarshalYAML implements the yaml.Unmarshaler interface.
func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
	*c = DefaultSDConfig
	type plain SDConfig
	err := unmarshal((*plain)(c))
	if err != nil {
		return err
	}
	if c.URL == "" {
		return fmt.Errorf("URL is missing")
	}
	parsedURL, err := url.Parse(c.URL)
	if err != nil {
		return err
	}
	if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" {
		return fmt.Errorf("URL scheme must be 'http' or 'https'")
	}
	if parsedURL.Host == "" {
		return fmt.Errorf("host is missing in URL")
	}
	if c.Query == "" {
		return fmt.Errorf("query missing")
	}
	return nil
}

// Discovery provides service discovery functionality based
// on PuppetDB resources.
type Discovery struct {
	*refresh.Discovery
	url               string
	query             string
	port              int
	includeParameters bool
	client            *http.Client
}

// NewDiscovery returns a new PuppetDB discovery for the given config.
func NewDiscovery(conf *SDConfig, logger log.Logger) (*Discovery, error) {
	if logger == nil {
		logger = log.NewNopLogger()
	}

	client, err := config.NewClientFromConfig(conf.HTTPClientConfig, "http")
	if err != nil {
		return nil, err
	}
	client.Timeout = time.Duration(conf.RefreshInterval)

	u, err := url.Parse(conf.URL)
	if err != nil {
		return nil, err
	}
	u.Path = path.Join(u.Path, "pdb/query/v4")

	d := &Discovery{
		url:               u.String(),
		port:              conf.Port,
		query:             conf.Query,
		includeParameters: conf.IncludeParameters,
		client:            client,
	}

	d.Discovery = refresh.NewDiscovery(
		logger,
		"http",
		time.Duration(conf.RefreshInterval),
		d.refresh,
	)
	return d, nil
}

func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
	body := struct {
		Query string `json:"query"`
	}{d.query}
	bodyBytes, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("POST", d.url, bytes.NewBuffer(bodyBytes))
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", userAgent)
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Content-Type", "application/json")

	resp, err := d.client.Do(req.WithContext(ctx))
	if err != nil {
		return nil, err
	}
	defer func() {
		io.Copy(ioutil.Discard, resp.Body)
		resp.Body.Close()
	}()

	if resp.StatusCode != http.StatusOK {
		return nil, errors.Errorf("server returned HTTP status %s", resp.Status)
	}

	if ct := resp.Header.Get("Content-Type"); !matchContentType.MatchString(ct) {
		return nil, errors.Errorf("unsupported content type %s", resp.Header.Get("Content-Type"))
	}

	b, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	var resources []Resource

	if err := json.Unmarshal(b, &resources); err != nil {
		return nil, err
	}

	tg := &targetgroup.Group{
		// Use a pseudo-URL as source.
		Source: d.url + "?query=" + d.query,
	}

	for _, resource := range resources {
		labels := model.LabelSet{
			pdbLabelCertname:    model.LabelValue(resource.Certname),
			pdbLabelResource:    model.LabelValue(resource.Resource),
			pdbLabelType:        model.LabelValue(resource.Type),
			pdbLabelTitle:       model.LabelValue(resource.Title),
			pdbLabelExported:    model.LabelValue(fmt.Sprintf("%t", resource.Exported)),
			pdbLabelFile:        model.LabelValue(resource.File),
			pdbLabelEnvironment: model.LabelValue(resource.Environment),
		}

		addr := net.JoinHostPort(resource.Certname, strconv.FormatUint(uint64(d.port), 10))
		labels[model.AddressLabel] = model.LabelValue(addr)

		if len(resource.Tags) > 0 {
			// We surround the separated list with the separator as well. This way regular expressions
			// in relabeling rules don't have to consider tag positions.
			tags := separator + strings.Join(resource.Tags, separator) + separator
			labels[pdbLabelTags] = model.LabelValue(tags)
		}

		// Parameters are not included by default. This should only be enabled
		// on select resources as it might expose secrets on the Prometheus UI
		// for certain resources.
		if d.includeParameters {
			for k, v := range resource.Parameters.toLabels() {
				labels[pdbLabelParameter+k] = v
			}
		}

		tg.Targets = append(tg.Targets, labels)
	}

	return []*targetgroup.Group{tg}, nil
}
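Note (not part of the change set): a minimal sketch of how the UnmarshalYAML validation above rejects the kinds of inputs exercised by the small YAML fixtures earlier in this diff. The helper name checkConfigs is made up for illustration, the snippet assumes it lives inside package puppetdb, and it uses gopkg.in/yaml.v2, which Prometheus uses elsewhere.

package puppetdb

import (
	"fmt"

	yaml "gopkg.in/yaml.v2"
)

// checkConfigs (hypothetical helper) feeds a few puppetdb_sd_configs entries
// through SDConfig.UnmarshalYAML and prints the validation result.
func checkConfigs() {
	inputs := []string{
		// accepted: http(s) scheme, host and query present
		"url: https://puppetdb.example.com\nquery: 'resources { type = \"Package\" and title = \"httpd\" }'",
		// rejected: "URL scheme must be 'http' or 'https'"
		"url: ftp://puppet\nquery: 'resources { type = \"Package\" and title = \"httpd\" }'",
		// rejected: "URL is missing"
		"query: 'resources { type = \"Package\" and title = \"httpd\" }'",
		// rejected: "query missing"
		"url: http://puppetserver/",
	}

	for _, in := range inputs {
		var cfg SDConfig
		if err := yaml.Unmarshal([]byte(in), &cfg); err != nil {
			fmt.Println("rejected:", err)
			continue
		}
		fmt.Println("accepted:", cfg.URL)
	}
}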
@@ -0,0 +1,195 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package puppetdb

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/go-kit/log"
	"github.com/prometheus/common/config"
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/stretchr/testify/require"
)

func mockServer(t *testing.T) *httptest.Server {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		var request struct {
			Query string `json:"query"`
		}
		err := json.NewDecoder(r.Body).Decode(&request)
		if err != nil {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		http.ServeFile(w, r, "fixtures/"+request.Query+".json")
	}))
	t.Cleanup(ts.Close)
	return ts
}

func TestPuppetSlashInURL(t *testing.T) {
	tests := map[string]string{
		"https://puppetserver":      "https://puppetserver/pdb/query/v4",
		"https://puppetserver/":     "https://puppetserver/pdb/query/v4",
		"http://puppetserver:8080/": "http://puppetserver:8080/pdb/query/v4",
		"http://puppetserver:8080":  "http://puppetserver:8080/pdb/query/v4",
	}

	for serverURL, apiURL := range tests {
		cfg := SDConfig{
			HTTPClientConfig: config.DefaultHTTPClientConfig,
			URL:              serverURL,
			Query:            "vhosts", // This is not a valid PuppetDB query, but it is used by the mock.
			Port:             80,
			RefreshInterval:  model.Duration(30 * time.Second),
		}
		d, err := NewDiscovery(&cfg, log.NewNopLogger())
		require.NoError(t, err)
		require.Equal(t, apiURL, d.url)
	}
}

func TestPuppetDBRefresh(t *testing.T) {
	ts := mockServer(t)

	cfg := SDConfig{
		HTTPClientConfig: config.DefaultHTTPClientConfig,
		URL:              ts.URL,
		Query:            "vhosts", // This is not a valid PuppetDB query, but it is used by the mock.
		Port:             80,
		RefreshInterval:  model.Duration(30 * time.Second),
	}

	d, err := NewDiscovery(&cfg, log.NewNopLogger())
	require.NoError(t, err)

	ctx := context.Background()
	tgs, err := d.refresh(ctx)
	require.NoError(t, err)

	expectedTargets := []*targetgroup.Group{
		{
			Targets: []model.LabelSet{
				{
					model.AddressLabel:                             model.LabelValue("edinburgh.example.com:80"),
					model.LabelName("__meta_puppetdb_certname"):    model.LabelValue("edinburgh.example.com"),
					model.LabelName("__meta_puppetdb_environment"): model.LabelValue("prod"),
					model.LabelName("__meta_puppetdb_exported"):    model.LabelValue("false"),
					model.LabelName("__meta_puppetdb_file"):        model.LabelValue("/etc/puppetlabs/code/environments/prod/modules/upstream/apache/manifests/init.pp"),
					model.LabelName("__meta_puppetdb_resource"):    model.LabelValue("49af83866dc5a1518968b68e58a25319107afe11"),
					model.LabelName("__meta_puppetdb_tags"):        model.LabelValue(",roles::hypervisor,apache,apache::vhost,class,default-ssl,profile_hypervisor,vhost,profile_apache,hypervisor,__node_regexp__edinburgh,roles,node,"),
					model.LabelName("__meta_puppetdb_title"):       model.LabelValue("default-ssl"),
					model.LabelName("__meta_puppetdb_type"):        model.LabelValue("Apache::Vhost"),
				},
			},
			Source: ts.URL + "/pdb/query/v4?query=vhosts",
		},
	}
	require.Equal(t, tgs, expectedTargets)
}

func TestPuppetDBRefreshWithParameters(t *testing.T) {
	ts := mockServer(t)

	cfg := SDConfig{
		HTTPClientConfig:  config.DefaultHTTPClientConfig,
		URL:               ts.URL,
		Query:             "vhosts", // This is not a valid PuppetDB query, but it is used by the mock.
		Port:              80,
		IncludeParameters: true,
		RefreshInterval:   model.Duration(30 * time.Second),
	}

	d, err := NewDiscovery(&cfg, log.NewNopLogger())
	require.NoError(t, err)

	ctx := context.Background()
	tgs, err := d.refresh(ctx)
	require.NoError(t, err)

	expectedTargets := []*targetgroup.Group{
		{
			Targets: []model.LabelSet{
				{
					model.AddressLabel:                                           model.LabelValue("edinburgh.example.com:80"),
					model.LabelName("__meta_puppetdb_certname"):                  model.LabelValue("edinburgh.example.com"),
					model.LabelName("__meta_puppetdb_environment"):               model.LabelValue("prod"),
					model.LabelName("__meta_puppetdb_exported"):                  model.LabelValue("false"),
					model.LabelName("__meta_puppetdb_file"):                      model.LabelValue("/etc/puppetlabs/code/environments/prod/modules/upstream/apache/manifests/init.pp"),
					model.LabelName("__meta_puppetdb_parameter_access_log"):      model.LabelValue("true"),
					model.LabelName("__meta_puppetdb_parameter_access_log_file"): model.LabelValue("ssl_access_log"),
					model.LabelName("__meta_puppetdb_parameter_docroot"):         model.LabelValue("/var/www/html"),
					model.LabelName("__meta_puppetdb_parameter_ensure"):          model.LabelValue("absent"),
					model.LabelName("__meta_puppetdb_parameter_labels_alias"):    model.LabelValue("edinburgh"),
					model.LabelName("__meta_puppetdb_parameter_options"):         model.LabelValue("Indexes,FollowSymLinks,MultiViews"),
					model.LabelName("__meta_puppetdb_resource"):                  model.LabelValue("49af83866dc5a1518968b68e58a25319107afe11"),
					model.LabelName("__meta_puppetdb_tags"):                      model.LabelValue(",roles::hypervisor,apache,apache::vhost,class,default-ssl,profile_hypervisor,vhost,profile_apache,hypervisor,__node_regexp__edinburgh,roles,node,"),
					model.LabelName("__meta_puppetdb_title"):                     model.LabelValue("default-ssl"),
					model.LabelName("__meta_puppetdb_type"):                      model.LabelValue("Apache::Vhost"),
				},
			},
			Source: ts.URL + "/pdb/query/v4?query=vhosts",
		},
	}
	require.Equal(t, tgs, expectedTargets)
}

func TestPuppetDBInvalidCode(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusBadRequest)
	}))

	t.Cleanup(ts.Close)

	cfg := SDConfig{
		HTTPClientConfig: config.DefaultHTTPClientConfig,
		URL:              ts.URL,
		RefreshInterval:  model.Duration(30 * time.Second),
	}

	d, err := NewDiscovery(&cfg, log.NewNopLogger())
	require.NoError(t, err)

	ctx := context.Background()
	_, err = d.refresh(ctx)
	require.EqualError(t, err, "server returned HTTP status 400 Bad Request")
}

func TestPuppetDBInvalidFormat(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "{}")
	}))

	t.Cleanup(ts.Close)

	cfg := SDConfig{
		HTTPClientConfig: config.DefaultHTTPClientConfig,
		URL:              ts.URL,
		RefreshInterval:  model.Duration(30 * time.Second),
	}

	d, err := NewDiscovery(&cfg, log.NewNopLogger())
	require.NoError(t, err)

	ctx := context.Background()
	_, err = d.refresh(ctx)
	require.EqualError(t, err, "unsupported content type text/plain; charset=utf-8")
}
@@ -0,0 +1,82 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package puppetdb

import (
	"strconv"
	"strings"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/util/strutil"
)

type Resource struct {
	Certname    string     `json:"certname"`
	Resource    string     `json:"resource"`
	Type        string     `json:"type"`
	Title       string     `json:"title"`
	Exported    bool       `json:"exported"`
	Tags        []string   `json:"tags"`
	File        string     `json:"file"`
	Environment string     `json:"environment"`
	Parameters  Parameters `json:"parameters"`
}

type Parameters map[string]interface{}

func (p *Parameters) toLabels() model.LabelSet {
	labels := model.LabelSet{}

	for k, v := range *p {
		var labelValue string
		switch value := v.(type) {
		case string:
			labelValue = value
		case bool:
			labelValue = strconv.FormatBool(value)
		case []string:
			labelValue = separator + strings.Join(value, separator) + separator
		case []interface{}:
			if len(value) == 0 {
				continue
			}
			values := make([]string, len(value))
			for i, v := range value {
				switch value := v.(type) {
				case string:
					values[i] = value
				case bool:
					values[i] = strconv.FormatBool(value)
				case []string:
					values[i] = separator + strings.Join(value, separator) + separator
				}
			}
			labelValue = strings.Join(values, separator)
		case map[string]interface{}:
			subParameter := Parameters(value)
			prefix := strutil.SanitizeLabelName(k + "_")
			for subk, subv := range subParameter.toLabels() {
				labels[model.LabelName(prefix)+subk] = subv
			}
		default:
			continue
		}
		if labelValue == "" {
			continue
		}
		name := strutil.SanitizeLabelName(k)
		labels[model.LabelName(name)] = model.LabelValue(labelValue)
	}
	return labels
}
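Note (not part of the change set): a quick sketch of how toLabels flattens nested parameter values, using values from the JSON fixture above. The helper name dumpParameterLabels is made up for illustration and would have to sit inside package puppetdb.

package puppetdb

import (
	"fmt"
	"sort"
)

// dumpParameterLabels (hypothetical helper) prints the flattened labels
// produced by Parameters.toLabels for a few representative values.
func dumpParameterLabels() {
	p := Parameters{
		"access_log": true,                                         // bool -> "true"
		"options":    []interface{}{"Indexes", "FollowSymLinks"},   // list -> values joined with ","
		"labels":     map[string]interface{}{"alias": "edinburgh"}, // map -> "labels_alias" sub-label
		"php_flags":  map[string]interface{}{},                     // empty map -> no label emitted
	}

	var pairs []string
	for name, value := range p.toLabels() {
		pairs = append(pairs, fmt.Sprintf("%s=%s", name, value))
	}
	sort.Strings(pairs)
	for _, pair := range pairs {
		fmt.Println(pair)
	}
	// refresh() then prepends __meta_puppetdb_parameter_, giving e.g.
	// __meta_puppetdb_parameter_access_log=true and
	// __meta_puppetdb_parameter_labels_alias=edinburgh, as asserted in the test above.
}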
@@ -0,0 +1,40 @@
# Prometheus example configuration to be used with PuppetDB.

scrape_configs:
  - job_name: 'puppetdb-node-exporter'
    puppetdb_sd_configs:
      # This example discovers the nodes which have the class Prometheus::Node_exporter.
      - url: https://puppetdb.example.com
        query: 'resources { type = "Class" and title = "Prometheus::Node_exporter" }'
        port: 9100
        tls_config:
          cert_file: prometheus-public.pem
          key_file: prometheus-private.pem
          ca_file: ca.pem

  - job_name: 'puppetdb-scrape-jobs'
    puppetdb_sd_configs:
      # This example uses the Prometheus::Scrape_job
      # exported resources.
      # https://github.com/camptocamp/prometheus-puppetdb-sd
      # This example is compatible with prometheus-puppetdb-sd
      # if each exported Prometheus::Scrape_job has at most one target.
      - url: https://puppetdb.example.com
        query: 'resources { type = "Prometheus::Scrape_job" and exported = true }'
        include_parameters: true
        tls_config:
          cert_file: prometheus-public.pem
          key_file: prometheus-private.pem
          ca_file: ca.pem
    relabel_configs:
      - source_labels: [__meta_puppetdb_certname]
        target_label: certname
      - source_labels: [__meta_puppetdb_parameter_targets]
        regex: '(.+),?.*'
        replacement: $1
        target_label: __address__
      - source_labels: [__meta_puppetdb_parameter_job_name]
        target_label: job
      - regex: '__meta_puppetdb_parameter_labels_(.+)'
        replacement: '$1'
        action: labelmap
File diff suppressed because it is too large.
@@ -0,0 +1,101 @@
# Testing sin() cos() tan() asin() acos() atan() sinh() cosh() tanh() asinh() acosh() atanh() rad() deg() pi().

load 5m
  trig{l="x"} 10
  trig{l="y"} 20
  trig{l="NaN"} NaN

eval instant at 5m sin(trig)
  {l="x"} -0.5440211108893699
  {l="y"} 0.9129452507276277
  {l="NaN"} NaN

eval instant at 5m cos(trig)
  {l="x"} -0.8390715290764524
  {l="y"} 0.40808206181339196
  {l="NaN"} NaN

eval instant at 5m tan(trig)
  {l="x"} 0.6483608274590867
  {l="y"} 2.2371609442247427
  {l="NaN"} NaN

eval instant at 5m asin(trig - 10.1)
  {l="x"} -0.10016742116155944
  {l="y"} NaN
  {l="NaN"} NaN

eval instant at 5m acos(trig - 10.1)
  {l="x"} 1.670963747956456
  {l="y"} NaN
  {l="NaN"} NaN

eval instant at 5m atan(trig)
  {l="x"} 1.4711276743037345
  {l="y"} 1.5208379310729538
  {l="NaN"} NaN

eval instant at 5m sinh(trig)
  {l="x"} 11013.232920103324
  {l="y"} 2.4258259770489514e+08
  {l="NaN"} NaN

eval instant at 5m cosh(trig)
  {l="x"} 11013.232920103324
  {l="y"} 2.4258259770489514e+08
  {l="NaN"} NaN

eval instant at 5m tanh(trig)
  {l="x"} 0.9999999958776927
  {l="y"} 1
  {l="NaN"} NaN

eval instant at 5m asinh(trig)
  {l="x"} 2.99822295029797
  {l="y"} 3.6895038689889055
  {l="NaN"} NaN

eval instant at 5m acosh(trig)
  {l="x"} 2.993222846126381
  {l="y"} 3.6882538673612966
  {l="NaN"} NaN

eval instant at 5m atanh(trig - 10.1)
  {l="x"} -0.10033534773107522
  {l="y"} NaN
  {l="NaN"} NaN

eval instant at 5m rad(trig)
  {l="x"} 0.17453292519943295
  {l="y"} 0.3490658503988659
  {l="NaN"} NaN

eval instant at 5m rad(trig - 10)
  {l="x"} 0
  {l="y"} 0.17453292519943295
  {l="NaN"} NaN

eval instant at 5m rad(trig - 20)
  {l="x"} -0.17453292519943295
  {l="y"} 0
  {l="NaN"} NaN

eval instant at 5m deg(trig)
  {l="x"} 572.9577951308232
  {l="y"} 1145.9155902616465
  {l="NaN"} NaN

eval instant at 5m deg(trig - 10)
  {l="x"} 0
  {l="y"} 572.9577951308232
  {l="NaN"} NaN

eval instant at 5m deg(trig - 20)
  {l="x"} -572.9577951308232
  {l="y"} 0
  {l="NaN"} NaN

clear

eval instant at 0s pi()
  3.141592653589793
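Note (not part of the change set): as a quick sanity check on the rad()/deg() expectations, they follow directly from the usual conversion formulas. For the l="x" series (value 10): rad(10) = 10 * pi / 180 ≈ 0.17453292519943295 and deg(10) = 10 * 180 / pi ≈ 572.9577951308232, matching the expected values above.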
@@ -1,18 +0,0 @@
#!/usr/bin/env bash
#
# Build React web UI.
# Run from repository root.
set -e
set -u

if ! [[ "$0" =~ "scripts/build_react_app.sh" ]]; then
  echo "must be run from repository root"
  exit 255
fi

cd web/ui/react-app

echo "building React app"
PUBLIC_URL=. npm run build
rm -rf ../static/react
mv build ../static/react
@@ -0,0 +1,135 @@
#!/usr/bin/env bash
# vim: ts=2 et
# Setting -x is absolutely forbidden as it could leak the GitHub token.
set -uo pipefail

# GITHUB_TOKEN required scope: repo.repo_public

git_mail="prometheus-team@googlegroups.com"
git_user="prombot"
branch="repo_sync_codemirror"
commit_msg="Update codemirror"
pr_title="Synchronize codemirror from prometheus/prometheus"
pr_msg="Propagating changes from prometheus/prometheus default branch."
target_repo="prometheus-community/codemirror-promql"
source_path="web/ui/module/codemirror-promql"

color_red='\e[31m'
color_green='\e[32m'
color_yellow='\e[33m'
color_none='\e[0m'

echo_red() {
  echo -e "${color_red}$@${color_none}" 1>&2
}

echo_green() {
  echo -e "${color_green}$@${color_none}" 1>&2
}

echo_yellow() {
  echo -e "${color_yellow}$@${color_none}" 1>&2
}

GITHUB_TOKEN="${GITHUB_TOKEN:-}"
if [ -z "${GITHUB_TOKEN}" ]; then
  echo_red 'GitHub token (GITHUB_TOKEN) not set. Terminating.'
  exit 1
fi

# List of files that should not be synced.
excluded_files="CODE_OF_CONDUCT.md LICENSE Makefile.common SECURITY.md .yamllint MAINTAINERS.md"
excluded_dirs=".github .circleci"

# Go to the root of the repo
cd "$(git rev-parse --show-cdup)" || exit 1

source_dir="$(pwd)/${source_path}"

tmp_dir="$(mktemp -d)"
trap 'rm -rf "${tmp_dir}"' EXIT

## Internal functions
github_api() {
  local url
  url="https://api.github.com/${1}"
  shift 1
  curl --retry 5 --silent --fail -u "${git_user}:${GITHUB_TOKEN}" "${url}" "$@"
}

get_default_branch() {
  github_api "repos/${1}" 2> /dev/null |
    jq -r .default_branch
}

push_branch() {
  local git_url
  git_url="https://${git_user}:${GITHUB_TOKEN}@github.com/${1}"
  # stdout and stderr are redirected to /dev/null otherwise git-push could leak
  # the token in the logs.
  # Delete the remote branch in case it was merged but not deleted.
  git push --quiet "${git_url}" ":${branch}" 1>/dev/null 2>&1
  git push --quiet "${git_url}" --set-upstream "${branch}" 1>/dev/null 2>&1
}

post_pull_request() {
  local repo="$1"
  local default_branch="$2"
  local post_json
  post_json="$(printf '{"title":"%s","base":"%s","head":"%s","body":"%s"}' "${pr_title}" "${default_branch}" "${branch}" "${pr_msg}")"
  echo "Posting PR to ${default_branch} on ${repo}"
  github_api "repos/${repo}/pulls" --data "${post_json}" --show-error |
    jq -r '"PR URL " + .html_url'
}

process_repo() {
  local org_repo
  local default_branch
  org_repo="$1"
  mkdir -p "${tmp_dir}/${org_repo}"
  echo_green "Processing '${org_repo}'"

  default_branch="$(get_default_branch "${org_repo}")"
  if [[ -z "${default_branch}" ]]; then
    echo "Can't get the default branch."
    return
  fi
  echo "Default branch: ${default_branch}"

  # Clone target repo to temporary directory and checkout to new branch
  git clone --quiet "https://github.com/${org_repo}.git" "${tmp_dir}/${org_repo}"
  cd "${tmp_dir}/${org_repo}" || return 1
  git checkout -b "${branch}" || return 1

  git rm -r .

  cp -ra ${source_dir}/. .
  git add .

  for excluded_dir in ${excluded_dirs}; do
    git reset -- "${excluded_dir}/*"
    git checkout -- "${excluded_dir}/*"
  done

  for excluded_file in ${excluded_files}; do
    git reset -- "${excluded_file}"
    git checkout -- "${excluded_file}"
  done

  if [[ -n "$(git status --porcelain)" ]]; then
    git config user.email "${git_mail}"
    git config user.name "${git_user}"
    git add .
    git commit -s -m "${commit_msg}"
    if push_branch "${org_repo}"; then
      if ! post_pull_request "${org_repo}" "${default_branch}"; then
        return 1
      fi
    else
      echo "Pushing ${branch} to ${org_repo} failed"
      return 1
    fi
  fi
}

process_repo ${target_repo}
Some files were not shown because too many files have changed in this diff.