Skip to content
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ Main (unreleased)

- update promtail converter to use `file_match` block for `loki.source.file` instead of going through `local.file_match`. (@kalleep)

- Added `send_traceparent` option for `tracing` config to enable traceparent header propagation. (@MyDigitalLife)
- Add `send_traceparent` option for `tracing` config to enable traceparent header propagation. (@MyDigitalLife)

- Add support for HTTP service discovery in `prometheus.operator.scrapeconfigs` component using `httpSDConfigs` in ScrapeConfig CRDs. (@QuentinBisson)

### Bugfixes

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,46 @@ You can run {{< param "PRODUCT_NAME" >}} from outside the cluster by supplying c
`scrapeconfigs` may reference secrets that are used to authenticate to the targets they scrape.
In these cases, the secrets are loaded and refreshed only when the ScrapeConfig is updated or when this component refreshes its internal state, which happens on a 5-minute refresh cycle.

## Service Discovery Methods

ScrapeConfig resources support multiple service discovery mechanisms:

### Static Configuration

Static configurations define a fixed list of targets to scrape. This is useful when targets are known in advance and don't change frequently.

### HTTP Service Discovery

HTTP service discovery allows dynamic target discovery by querying an HTTP endpoint that returns target information in JSON format. The endpoint is polled at regular intervals to discover new targets or remove stale ones. This is particularly useful for:

- Dynamic environments where targets are frequently added or removed
- Integration with external service registries
- Custom service discovery implementations

The HTTP endpoint should return a JSON array of target groups, where each target group contains:
- `targets`: Array of `host:port` combinations to scrape
- `labels`: Optional labels to apply to all targets in the group

Example JSON response:
```json
[
  {
    "targets": ["service1.example.com:8080", "service2.example.com:8080"],
    "labels": {
      "job": "web-servers",
      "env": "production"
    }
  },
  {
    "targets": ["db1.example.com:9090"],
    "labels": {
      "job": "databases",
      "env": "production"
    }
  }
]
```

## Usage

```alloy
Expand Down Expand Up @@ -238,6 +278,74 @@ prometheus.operator.scrapeconfigs "scrapeconfigs" {
}
```

### Static Configuration Example

This example shows a ScrapeConfig resource using static target discovery:

```yaml
apiVersion: monitoring.coreos.com/v1alpha1
kind: ScrapeConfig
metadata:
  name: static-targets
  namespace: monitoring
spec:
  staticConfigs:
    - targets:
        - "web-server-1.example.com:8080"
        - "web-server-2.example.com:8080"
      labels:
        job: "web-servers"
        env: "production"
  metricsPath: /metrics
  scrapeInterval: 30s
```

### HTTP Service Discovery Example

This example shows a ScrapeConfig resource using HTTP service discovery:

```yaml
apiVersion: monitoring.coreos.com/v1alpha1
kind: ScrapeConfig
metadata:
  name: http-discovery
  namespace: monitoring
spec:
  httpSDConfigs:
    - url: "http://service-registry.internal:8080/discover"
      refreshInterval: 60s
  metricsPath: /metrics
  scrapeInterval: 30s
  scrapeTimeout: 10s
```

The HTTP endpoint (`http://service-registry.internal:8080/discover`) should return JSON in this format:

```json
[
  {
    "targets": [
      "api-server-1.example.com:8080",
      "api-server-2.example.com:8080"
    ],
    "labels": {
      "service": "api",
      "version": "v1.2.3"
    }
  },
  {
    "targets": [
      "worker-1.example.com:9090",
      "worker-2.example.com:9090"
    ],
    "labels": {
      "service": "worker",
      "version": "v2.1.0"
    }
  }
]
```

## Extra Metric Labels

`prometheus.operator.scrapeconfigs` adds the following extra
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,41 +5,46 @@ package configgen
import (
"fmt"
"strings"
"time"

promopv1alpha1 "github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring/v1alpha1"
"github.com/prometheus-operator/prometheus-operator/pkg/namespacelabeler"
commonConfig "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/discovery"
"github.com/prometheus/prometheus/discovery/http"
"github.com/prometheus/prometheus/discovery/targetgroup"
"github.com/prometheus/prometheus/model/relabel"
)

// GenerateScrapeConfigConfigs converts a ScrapeConfig custom resource into
// zero or more Prometheus scrape configs. Per-entry failures are collected
// into the returned error slice rather than aborting the whole conversion.
func (cg *ConfigGenerator) GenerateScrapeConfigConfigs(m *promopv1alpha1.ScrapeConfig) (cfg []*config.ScrapeConfig, errors []error) {
	return cg.generateStaticScrapeConfigConfigs(m, nil, nil)
}

func (cg *ConfigGenerator) generateStaticScrapeConfigConfigs(m *promopv1alpha1.ScrapeConfig, cfg []*config.ScrapeConfig, errors []error) ([]*config.ScrapeConfig, []error) {
for i, ep := range m.Spec.StaticConfigs {
scrapeConfig, err := cg.generateStaticScrapeConfigConfig(m, ep, i)
if err != nil {
if scrapeConfig, err := cg.generateStaticScrapeConfigConfig(m, ep, i); err != nil {
errors = append(errors, err)
} else {
cfg = append(cfg, scrapeConfig)
}
}
for i, ep := range m.Spec.HTTPSDConfigs {
if scrapeConfig, err := cg.generateHTTPScrapeConfigConfig(m, ep, i); err != nil {
errors = append(errors, err)
} else {
cfg = append(cfg, scrapeConfig)
}
}
return cfg, errors
return
}

func (cg *ConfigGenerator) generateStaticScrapeConfigConfig(m *promopv1alpha1.ScrapeConfig, sc promopv1alpha1.StaticConfig, i int) (cfg *config.ScrapeConfig, err error) {
relabels := cg.initRelabelings()
metricRelabels := relabeler{}
cfg, err = cg.commonScrapeConfigConfig(m, i, &relabels, &metricRelabels)
cfg.JobName = fmt.Sprintf("scrapeConfig/%s/%s/static/%d", m.Namespace, m.Name, i)
if err != nil {
return nil, err
}
cfg.JobName = fmt.Sprintf("scrapeConfig/%s/%s/static/%d", m.Namespace, m.Name, i)

targets := []model.LabelSet{}
for _, target := range sc.Targets {
targets = append(targets, model.LabelSet{
Expand All @@ -63,16 +68,57 @@ func (cg *ConfigGenerator) generateStaticScrapeConfigConfig(m *promopv1alpha1.Sc
},
}
cfg.ServiceDiscoveryConfigs = append(cfg.ServiceDiscoveryConfigs, discoveryCfg)
cfg.RelabelConfigs = relabels.configs
cfg.MetricRelabelConfigs = metricRelabels.configs
if m.Spec.ScrapeProtocols != nil {
protocols, err := convertScrapeProtocols(m.Spec.ScrapeProtocols)
return cg.finalizeScrapeConfig(cfg, &relabels, &metricRelabels)
}

// generateHTTPScrapeConfigConfig builds a Prometheus scrape config for the
// i-th httpSDConfigs entry of the given ScrapeConfig resource. The returned
// config carries a single HTTP service-discovery section; optional client
// credentials (TLS, basic auth, authorization) are resolved from secrets in
// the resource's namespace via the generator's helpers.
//
// NOTE(review): the pasted diff interleaved three removed lines from the old
// static-config tail (`cfg.ScrapeProtocols = protocols`, its closing brace,
// and the old return) into the BasicAuth branch; this is the reconstructed
// post-merge body.
func (cg *ConfigGenerator) generateHTTPScrapeConfigConfig(m *promopv1alpha1.ScrapeConfig, httpSD promopv1alpha1.HTTPSDConfig, i int) (cfg *config.ScrapeConfig, err error) {
	relabels := cg.initRelabelings()
	metricRelabels := relabeler{}
	cfg, err = cg.commonScrapeConfigConfig(m, i, &relabels, &metricRelabels)
	if err != nil {
		return nil, err
	}
	cfg.JobName = fmt.Sprintf("scrapeConfig/%s/%s/http/%d", m.Namespace, m.Name, i)

	// Convert the operator's HTTPSDConfig into Prometheus' HTTP SD config.
	// The 30s refresh interval is only a fallback; it is overridden below
	// when the resource specifies one.
	httpSDConfig := &http.SDConfig{
		HTTPClientConfig: commonConfig.DefaultHTTPClientConfig,
		RefreshInterval:  model.Duration(30 * time.Second),
		URL:              httpSD.URL,
	}

	if httpSD.RefreshInterval != nil {
		if httpSDConfig.RefreshInterval, err = model.ParseDuration(string(*httpSD.RefreshInterval)); err != nil {
			return nil, fmt.Errorf("parsing refresh interval from HTTPSDConfig: %w", err)
		}
	}

	// Optional TLS settings for contacting the SD endpoint.
	if httpSD.TLSConfig != nil {
		if httpSDConfig.HTTPClientConfig.TLSConfig, err = cg.generateSafeTLS(*httpSD.TLSConfig, m.Namespace); err != nil {
			return nil, err
		}
	}

	// Optional basic-auth credentials for the SD endpoint.
	if httpSD.BasicAuth != nil {
		if httpSDConfig.HTTPClientConfig.BasicAuth, err = cg.generateBasicAuth(*httpSD.BasicAuth, m.Namespace); err != nil {
			return nil, err
		}
	}

	// Optional Authorization-header credentials for the SD endpoint.
	if httpSD.Authorization != nil {
		if httpSDConfig.HTTPClientConfig.Authorization, err = cg.generateAuthorization(*httpSD.Authorization, m.Namespace); err != nil {
			return nil, err
		}
	}

	cfg.ServiceDiscoveryConfigs = append(cfg.ServiceDiscoveryConfigs, httpSDConfig)
	return cg.finalizeScrapeConfig(cfg, &relabels, &metricRelabels)
}

func (cg *ConfigGenerator) commonScrapeConfigConfig(m *promopv1alpha1.ScrapeConfig, _ int, relabels *relabeler, metricRelabels *relabeler) (cfg *config.ScrapeConfig, err error) {
Expand All @@ -93,6 +139,13 @@ func (cg *ConfigGenerator) commonScrapeConfigConfig(m *promopv1alpha1.ScrapeConf
return nil, fmt.Errorf("parsing timeout from scrapeConfig: %w", err)
}
}
if m.Spec.ScrapeProtocols != nil {
protocols, err := convertScrapeProtocols(m.Spec.ScrapeProtocols)
if err != nil {
return nil, fmt.Errorf("converting scrape protocols: %w", err)
}
cfg.ScrapeProtocols = protocols
}
if m.Spec.MetricsPath != nil {
cfg.MetricsPath = *m.Spec.MetricsPath
}
Expand Down Expand Up @@ -143,3 +196,10 @@ func (cg *ConfigGenerator) commonScrapeConfigConfig(m *promopv1alpha1.ScrapeConf
cfg.LabelValueLengthLimit = uint(defaultIfNil(m.Spec.LabelValueLengthLimit, 0))
return cfg, err
}

// finalizeScrapeConfig attaches the accumulated relabel and metric-relabel
// rules to cfg, then validates the result against the generator's global
// scrape options. It returns cfg together with the validation outcome.
func (cg *ConfigGenerator) finalizeScrapeConfig(cfg *config.ScrapeConfig, relabels, metricRelabels *relabeler) (*config.ScrapeConfig, error) {
	cfg.RelabelConfigs = relabels.configs
	cfg.MetricRelabelConfigs = metricRelabels.configs
	err := cfg.Validate(cg.ScrapeOptions.GlobalConfig())
	return cfg, err
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/discovery"
"github.com/prometheus/prometheus/discovery/http"
"github.com/prometheus/prometheus/discovery/targetgroup"
"github.com/prometheus/prometheus/model/relabel"
"github.com/stretchr/testify/assert"
Expand Down Expand Up @@ -254,3 +255,78 @@ func TestGenerateStaticScrapeConfigConfig(t *testing.T) {
})
}
}

// TestGenerateHTTPScrapeConfigConfig verifies that a ScrapeConfig resource
// declaring an httpSDConfigs entry is translated into a Prometheus scrape
// config carrying an HTTP service-discovery section.
//
// NOTE(review): expected also populates HonorTimestamps, ScrapeTimeout,
// Scheme, and the full SD client config, but only the fields asserted below
// are compared — confirm whether a full struct comparison was intended.
func TestGenerateHTTPScrapeConfigConfig(t *testing.T) {
	testCases := []struct {
		name     string
		m        *promopv1alpha1.ScrapeConfig
		ep       promopv1alpha1.HTTPSDConfig
		expected *config.ScrapeConfig
	}{
		{
			name: "http service discovery",
			m: &promopv1alpha1.ScrapeConfig{
				ObjectMeta: metav1.ObjectMeta{
					Namespace: "test-namespace",
					Name:      "test-scrapeconfig",
				},
				Spec: promopv1alpha1.ScrapeConfigSpec{
					MetricsPath:    ptr.To("/metrics"),
					ScrapeInterval: ptr.To(promopv1.Duration("60s")),
				},
			},
			ep: promopv1alpha1.HTTPSDConfig{
				URL:             "http://example-service.test-namespace:8080/sd",
				RefreshInterval: ptr.To(promopv1.Duration("15s")),
			},
			expected: &config.ScrapeConfig{
				JobName:         "scrapeConfig/test-namespace/test-scrapeconfig/http/0",
				HonorTimestamps: true,
				ScrapeInterval:  model.Duration(60 * time.Second),
				ScrapeTimeout:   model.Duration(10 * time.Second),
				MetricsPath:     "/metrics",
				Scheme:          "http",
				ServiceDiscoveryConfigs: discovery.Configs{
					&http.SDConfig{
						HTTPClientConfig: commonConfig.DefaultHTTPClientConfig,
						RefreshInterval:  model.Duration(15 * time.Second),
						URL:              "http://example-service.test-namespace:8080/sd",
					},
				},
			},
		},
	}

	for _, tt := range testCases {
		t.Run(tt.name, func(t *testing.T) {
			cg := &ConfigGenerator{
				Client: &kubernetes.ClientArguments{},
				AdditionalRelabelConfigs: []*alloy_relabel.Config{
					{TargetLabel: "__meta_foo", Replacement: "bar"},
				},
				ScrapeOptions: operator.ScrapeOptions{
					DefaultScrapeInterval: time.Hour,
					DefaultScrapeTimeout:  42 * time.Second,
				},
			}

			got, err := cg.generateHTTPScrapeConfigConfig(tt.m, tt.ep, 0)
			require.NoError(t, err)

			// Scrape-level settings derived from the ScrapeConfig spec.
			assert.Equal(t, tt.expected.JobName, got.JobName)
			assert.Equal(t, tt.expected.MetricsPath, got.MetricsPath)
			assert.Equal(t, tt.expected.ScrapeInterval, got.ScrapeInterval)

			// Exactly one HTTP SD config should be attached, carrying the
			// URL and refresh interval from the resource.
			require.Len(t, got.ServiceDiscoveryConfigs, 1)
			sd, ok := got.ServiceDiscoveryConfigs[0].(*http.SDConfig)
			require.True(t, ok, "Expected HTTP SD config")
			assert.Equal(t, "http://example-service.test-namespace:8080/sd", sd.URL)
			assert.Equal(t, model.Duration(15*time.Second), sd.RefreshInterval)
		})
	}
}
Loading