CHANGELOG.md: 4 changes (3 additions, 1 deletion)
@@ -22,7 +22,9 @@ Main (unreleased)

- update promtail converter to use `file_match` block for `loki.source.file` instead of going through `local.file_match`. (@kalleep)

- Added `send_traceparent` option for `tracing` config to enable traceparent header propagation. (@MyDigitalLife)
- Add `send_traceparent` option for `tracing` config to enable traceparent header propagation. (@MyDigitalLife)

- Add support for HTTP service discovery in the `prometheus.operator.scrapeconfigs` component using `httpSDConfigs` in ScrapeConfig CRDs. (@QuentinBisson)

### Bugfixes

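For context on the new `httpSDConfigs` entry above, here is a minimal sketch (not part of this diff) of a ScrapeConfig object that exercises the HTTP SD path. It uses the prometheus-operator `v1alpha1` types that appear in the code below; the name, namespace, URL, and intervals are illustrative only.

```go
package example

import (
	promopv1 "github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring/v1"
	promopv1alpha1 "github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring/v1alpha1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/utils/ptr"
)

// Hypothetical example only: a ScrapeConfig with an HTTP SD section, shaped
// like the objects the generator code in this PR consumes.
var exampleScrapeConfig = &promopv1alpha1.ScrapeConfig{
	ObjectMeta: metav1.ObjectMeta{
		Namespace: "test-namespace",
		Name:      "example-http-sd",
	},
	Spec: promopv1alpha1.ScrapeConfigSpec{
		MetricsPath:    ptr.To("/metrics"),
		ScrapeInterval: ptr.To(promopv1.Duration("60s")),
		HTTPSDConfigs: []promopv1alpha1.HTTPSDConfig{
			{
				// Endpoint that serves Prometheus HTTP SD JSON.
				URL:             "http://example-service.test-namespace:8080/sd",
				RefreshInterval: ptr.To(promopv1.Duration("15s")),
			},
		},
	},
}
```

In a cluster this would normally be applied as a ScrapeConfig custom resource; the Go form above simply mirrors the test fixtures further down in this diff.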
@@ -5,41 +5,46 @@ package configgen
import (
"fmt"
"strings"
"time"

promopv1alpha1 "github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring/v1alpha1"
"github.com/prometheus-operator/prometheus-operator/pkg/namespacelabeler"
commonConfig "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/discovery"
"github.com/prometheus/prometheus/discovery/http"
"github.com/prometheus/prometheus/discovery/targetgroup"
"github.com/prometheus/prometheus/model/relabel"
)

func (cg *ConfigGenerator) GenerateScrapeConfigConfigs(m *promopv1alpha1.ScrapeConfig) (cfg []*config.ScrapeConfig, errors []error) {
cfg, errors = cg.generateStaticScrapeConfigConfigs(m, cfg, errors)
return
}

func (cg *ConfigGenerator) generateStaticScrapeConfigConfigs(m *promopv1alpha1.ScrapeConfig, cfg []*config.ScrapeConfig, errors []error) ([]*config.ScrapeConfig, []error) {
for i, ep := range m.Spec.StaticConfigs {
scrapeConfig, err := cg.generateStaticScrapeConfigConfig(m, ep, i)
if err != nil {
if scrapeConfig, err := cg.generateStaticScrapeConfigConfig(m, ep, i); err != nil {
errors = append(errors, err)
} else {
cfg = append(cfg, scrapeConfig)
}
}
for i, ep := range m.Spec.HTTPSDConfigs {
if scrapeConfig, err := cg.generateHTTPScrapeConfigConfig(m, ep, i); err != nil {
errors = append(errors, err)
} else {
cfg = append(cfg, scrapeConfig)
}
}
return cfg, errors
return
}

func (cg *ConfigGenerator) generateStaticScrapeConfigConfig(m *promopv1alpha1.ScrapeConfig, sc promopv1alpha1.StaticConfig, i int) (cfg *config.ScrapeConfig, err error) {
relabels := cg.initRelabelings()
metricRelabels := relabeler{}
cfg, err = cg.commonScrapeConfigConfig(m, i, &relabels, &metricRelabels)
cfg.JobName = fmt.Sprintf("scrapeConfig/%s/%s/static/%d", m.Namespace, m.Name, i)
if err != nil {
return nil, err
}
cfg.JobName = fmt.Sprintf("scrapeConfig/%s/%s/static/%d", m.Namespace, m.Name, i)

targets := []model.LabelSet{}
for _, target := range sc.Targets {
targets = append(targets, model.LabelSet{
@@ -63,16 +68,57 @@ func (cg *ConfigGenerator) generateStaticScrapeConfigConfig(m *promopv1alpha1.Sc
},
}
cfg.ServiceDiscoveryConfigs = append(cfg.ServiceDiscoveryConfigs, discoveryCfg)
cfg.RelabelConfigs = relabels.configs
cfg.MetricRelabelConfigs = metricRelabels.configs
if m.Spec.ScrapeProtocols != nil {
protocols, err := convertScrapeProtocols(m.Spec.ScrapeProtocols)
return cg.finalizeScrapeConfig(cfg, m, &relabels, &metricRelabels)
}

func (cg *ConfigGenerator) generateHTTPScrapeConfigConfig(m *promopv1alpha1.ScrapeConfig, httpSD promopv1alpha1.HTTPSDConfig, i int) (cfg *config.ScrapeConfig, err error) {
relabels := cg.initRelabelings()
metricRelabels := relabeler{}
cfg, err = cg.commonScrapeConfigConfig(m, i, &relabels, &metricRelabels)
if err != nil {
return nil, err
}
cfg.JobName = fmt.Sprintf("scrapeConfig/%s/%s/http/%d", m.Namespace, m.Name, i)

// Convert HTTPSDConfig to Prometheus HTTP SD config
httpSDConfig := &http.SDConfig{
HTTPClientConfig: commonConfig.DefaultHTTPClientConfig,
RefreshInterval: model.Duration(30 * time.Second), // Default refresh interval
URL: httpSD.URL,
}

// Set refresh interval if specified
if httpSD.RefreshInterval != nil {
if httpSDConfig.RefreshInterval, err = model.ParseDuration(string(*httpSD.RefreshInterval)); err != nil {
return nil, fmt.Errorf("parsing refresh interval from HTTPSDConfig: %w", err)
}
}

// Add TLS configuration if specified
if httpSD.TLSConfig != nil {
if httpSDConfig.HTTPClientConfig.TLSConfig, err = cg.generateSafeTLS(*httpSD.TLSConfig, m.Namespace); err != nil {
return nil, err
}
}

// Add BasicAuth if specified
if httpSD.BasicAuth != nil {
httpSDConfig.HTTPClientConfig.BasicAuth, err = cg.generateBasicAuth(*httpSD.BasicAuth, m.Namespace)
if err != nil {
return nil, err
}
cfg.ScrapeProtocols = protocols
}
return cfg, cfg.Validate(cg.ScrapeOptions.GlobalConfig())

// Add Authorization if specified
if httpSD.Authorization != nil {
httpSDConfig.HTTPClientConfig.Authorization, err = cg.generateAuthorization(*httpSD.Authorization, m.Namespace)
if err != nil {
return nil, err
}
}

cfg.ServiceDiscoveryConfigs = append(cfg.ServiceDiscoveryConfigs, httpSDConfig)
return cg.finalizeScrapeConfig(cfg, m, &relabels, &metricRelabels)
}

func (cg *ConfigGenerator) commonScrapeConfigConfig(m *promopv1alpha1.ScrapeConfig, _ int, relabels *relabeler, metricRelabels *relabeler) (cfg *config.ScrapeConfig, err error) {
@@ -93,6 +139,13 @@ func (cg *ConfigGenerator) commonScrapeConfigConfig(m *promopv1alpha1.ScrapeConf
return nil, fmt.Errorf("parsing timeout from scrapeConfig: %w", err)
}
}
if m.Spec.ScrapeProtocols != nil {
protocols, err := convertScrapeProtocols(m.Spec.ScrapeProtocols)
if err != nil {
return nil, fmt.Errorf("converting scrape protocols: %w", err)
}
cfg.ScrapeProtocols = protocols
}
if m.Spec.MetricsPath != nil {
cfg.MetricsPath = *m.Spec.MetricsPath
}
@@ -143,3 +196,10 @@ func (cg *ConfigGenerator) commonScrapeConfigConfig(m *promopv1alpha1.ScrapeConf
cfg.LabelValueLengthLimit = uint(defaultIfNil(m.Spec.LabelValueLengthLimit, 0))
return cfg, err
}

// finalizeScrapeConfig applies common finalization steps to a scrape config
func (cg *ConfigGenerator) finalizeScrapeConfig(cfg *config.ScrapeConfig, m *promopv1alpha1.ScrapeConfig, relabels *relabeler, metricRelabels *relabeler) (*config.ScrapeConfig, error) {
cfg.RelabelConfigs = relabels.configs
cfg.MetricRelabelConfigs = metricRelabels.configs
return cfg, cfg.Validate(cg.ScrapeOptions.GlobalConfig())
}
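The `http.SDConfig` built by `generateHTTPScrapeConfigConfig` polls an HTTP endpoint that must return the standard Prometheus HTTP SD payload: a JSON array of target groups. As a rough illustration (hypothetical, not part of this change; the path and port are arbitrary), such an endpoint could look like this:

```go
// Minimal HTTP SD endpoint sketch. Prometheus expects a JSON array of target
// groups of the form [{"targets": ["host:port", ...], "labels": {...}}].
package main

import (
	"encoding/json"
	"log"
	"net/http"
)

type targetGroup struct {
	Targets []string          `json:"targets"`
	Labels  map[string]string `json:"labels,omitempty"`
}

func main() {
	http.HandleFunc("/sd", func(w http.ResponseWriter, _ *http.Request) {
		groups := []targetGroup{
			{
				Targets: []string{"app-1:8080", "app-2:8080"},
				Labels:  map[string]string{"env": "dev"},
			},
		}
		w.Header().Set("Content-Type", "application/json")
		if err := json.NewEncoder(w).Encode(groups); err != nil {
			log.Printf("encoding target groups: %v", err)
		}
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```

With no `refreshInterval` set on the HTTPSDConfig, the generated config keeps the 30-second default shown above.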
@@ -12,6 +12,7 @@ import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/discovery"
"github.com/prometheus/prometheus/discovery/http"
"github.com/prometheus/prometheus/discovery/targetgroup"
"github.com/prometheus/prometheus/model/relabel"
"github.com/stretchr/testify/assert"
@@ -254,3 +255,78 @@ func TestGenerateStaticScrapeConfigConfig(t *testing.T) {
})
}
}

func TestGenerateHTTPScrapeConfigConfig(t *testing.T) {
suite := []struct {
name string
m *promopv1alpha1.ScrapeConfig
ep promopv1alpha1.HTTPSDConfig
expected *config.ScrapeConfig
}{
{
name: "http service discovery",
m: &promopv1alpha1.ScrapeConfig{
ObjectMeta: metav1.ObjectMeta{
Namespace: "test-namespace",
Name: "test-scrapeconfig",
},
Spec: promopv1alpha1.ScrapeConfigSpec{
MetricsPath: ptr.To("/metrics"),
ScrapeInterval: ptr.To(promopv1.Duration("60s")),
},
},
ep: promopv1alpha1.HTTPSDConfig{
URL: "http://example-service.test-namespace:8080/sd",
RefreshInterval: ptr.To(promopv1.Duration("15s")),
},
expected: &config.ScrapeConfig{
JobName: "scrapeConfig/test-namespace/test-scrapeconfig/http/0",
HonorTimestamps: true,
ScrapeInterval: model.Duration(60 * time.Second),
ScrapeTimeout: model.Duration(10 * time.Second),
MetricsPath: "/metrics",
Scheme: "http",
ServiceDiscoveryConfigs: discovery.Configs{
&http.SDConfig{
HTTPClientConfig: commonConfig.DefaultHTTPClientConfig,
RefreshInterval: model.Duration(15 * time.Second),
URL: "http://example-service.test-namespace:8080/sd",
},
},
},
},
}

for _, tc := range suite {
t.Run(tc.name, func(t *testing.T) {
cg := &ConfigGenerator{
Client: &kubernetes.ClientArguments{},
AdditionalRelabelConfigs: []*alloy_relabel.Config{
{TargetLabel: "__meta_foo", Replacement: "bar"},
},
ScrapeOptions: operator.ScrapeOptions{
DefaultScrapeInterval: time.Hour,
DefaultScrapeTimeout: 42 * time.Second,
},
}
got, err := cg.generateHTTPScrapeConfigConfig(tc.m, tc.ep, 0)
require.NoError(t, err)

// Check job name
assert.Equal(t, tc.expected.JobName, got.JobName)

// Check metrics path
assert.Equal(t, tc.expected.MetricsPath, got.MetricsPath)

// Check scrape interval
assert.Equal(t, tc.expected.ScrapeInterval, got.ScrapeInterval)

// Check service discovery configs
require.Len(t, got.ServiceDiscoveryConfigs, 1)
httpSD, ok := got.ServiceDiscoveryConfigs[0].(*http.SDConfig)
require.True(t, ok, "Expected HTTP SD config")
assert.Equal(t, "http://example-service.test-namespace:8080/sd", httpSD.URL)
assert.Equal(t, model.Duration(15*time.Second), httpSD.RefreshInterval)
})
}
}