From efbf3952d26511a75a18b6c60eb146fb7c0e109e Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Wed, 21 Jan 2026 10:53:52 +0000 Subject: [PATCH 01/11] Add scraperID to error logs for scraper This supplements the error logs for scrapers with the scraperID that failed. Currently when the scraper experiences an error there is no information in the error logs which tells the user what scraper errored. This fix adds the scraper id, for example if the memory scraper errors the logs now have "scraper": "memory". --- scraper/scraperhelper/obs_metrics.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scraper/scraperhelper/obs_metrics.go b/scraper/scraperhelper/obs_metrics.go index 066baec2694..b2168292915 100644 --- a/scraper/scraperhelper/obs_metrics.go +++ b/scraper/scraperhelper/obs_metrics.go @@ -58,7 +58,7 @@ func wrapObsMetrics(sc scraper.Metrics, receiverID, scraperID component.ID, set numScrapedMetrics := 0 numErroredMetrics := 0 if err != nil { - set.Logger.Error("Error scraping metrics", zap.Error(err)) + set.Logger.Error("Error scraping metrics", zap.String("scraper", scraperID.String()), zap.Error(err)) var partialErr scrapererror.PartialScrapeError if errors.As(err, &partialErr) { numErroredMetrics = partialErr.Failed From d3e77b0b9913045e72fe71c4185f88a8862ae71e Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Wed, 21 Jan 2026 11:45:26 +0000 Subject: [PATCH 02/11] Update obs logs and obs xscraperhelper obs profiles with scraperID Here we also provide the scraperID to the obs logs and xscraperhelper obs profile, tests have also been added --- scraper/scraperhelper/obs_logs.go | 2 +- scraper/scraperhelper/obs_logs_test.go | 26 +++++++++++++++++++ scraper/scraperhelper/obs_metrics_test.go | 26 +++++++++++++++++++ .../xscraperhelper/obs_profiles.go | 2 +- .../xscraperhelper/obs_profiles_test.go | 26 +++++++++++++++++++ 5 files changed, 80 insertions(+), 2 deletions(-) diff --git a/scraper/scraperhelper/obs_logs.go
b/scraper/scraperhelper/obs_logs.go index 5e22e752258..cfaee760d72 100644 --- a/scraper/scraperhelper/obs_logs.go +++ b/scraper/scraperhelper/obs_logs.go @@ -50,7 +50,7 @@ func wrapObsLogs(sc scraper.Logs, receiverID, scraperID component.ID, set compon numScrapedLogs := 0 numErroredLogs := 0 if err != nil { - set.Logger.Error("Error scraping logs", zap.Error(err)) + set.Logger.Error("Error scraping logs", zap.String("scraper", scraperID.String()), zap.Error(err)) var partialErr scrapererror.PartialScrapeError if errors.As(err, &partialErr) { numErroredLogs = partialErr.Failed diff --git a/scraper/scraperhelper/obs_logs_test.go b/scraper/scraperhelper/obs_logs_test.go index 454bf51f947..78996f952f8 100644 --- a/scraper/scraperhelper/obs_logs_test.go +++ b/scraper/scraperhelper/obs_logs_test.go @@ -14,6 +14,8 @@ import ( "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/sdk/metric/metricdata" "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" + "go.uber.org/zap" + "go.uber.org/zap/zaptest/observer" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/component/componenttest" @@ -97,6 +99,30 @@ func TestCheckScraperLogs(t *testing.T) { checkScraperLogs(t, tel, receiverID, scraperID, 7, 0) } +func TestScrapeLogsDataOp_LogsScraperID(t *testing.T) { + tel := componenttest.NewTelemetry() + t.Cleanup(func() { require.NoError(t, tel.Shutdown(context.Background())) }) + + core, observedLogs := observer.New(zap.ErrorLevel) + set := tel.NewTelemetrySettings() + set.Logger = zap.New(core) + + sm, err := scraper.NewLogs(func(context.Context) (plog.Logs, error) { + return plog.NewLogs(), errFake + }) + require.NoError(t, err) + sf, err := wrapObsLogs(sm, receiverID, scraperID, set) + require.NoError(t, err) + _, err = sf.ScrapeLogs(context.Background()) + require.ErrorIs(t, err, errFake) + + errorLogs := observedLogs.FilterLevelExact(zap.ErrorLevel).All() + require.Len(t, errorLogs, 1) + assert.Equal(t, "Error scraping logs", 
errorLogs[0].Message) + assert.Equal(t, scraperID.String(), errorLogs[0].ContextMap()["scraper"]) + assert.Equal(t, errFake.Error(), errorLogs[0].ContextMap()["error"]) +} + func checkScraperLogs(t *testing.T, tel *componenttest.Telemetry, receiver, scraper component.ID, scrapedLogRecords, erroredLogRecords int64) { metadatatest.AssertEqualScraperScrapedLogRecords(t, tel, []metricdata.DataPoint[int64]{ diff --git a/scraper/scraperhelper/obs_metrics_test.go b/scraper/scraperhelper/obs_metrics_test.go index 2a9cf4e989c..0a12cf08c96 100644 --- a/scraper/scraperhelper/obs_metrics_test.go +++ b/scraper/scraperhelper/obs_metrics_test.go @@ -14,6 +14,8 @@ import ( "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/sdk/metric/metricdata" "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" + "go.uber.org/zap" + "go.uber.org/zap/zaptest/observer" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/component/componenttest" @@ -110,6 +112,30 @@ func TestCheckScraperMetrics(t *testing.T) { checkScraperMetrics(t, tel, receiverID, scraperID, 7, 0) } +func TestScrapeMetricsDataOp_LogsScraperID(t *testing.T) { + tel := componenttest.NewTelemetry() + t.Cleanup(func() { require.NoError(t, tel.Shutdown(context.Background())) }) + + core, observedLogs := observer.New(zap.ErrorLevel) + set := tel.NewTelemetrySettings() + set.Logger = zap.New(core) + + sm, err := scraper.NewMetrics(func(context.Context) (pmetric.Metrics, error) { + return pmetric.NewMetrics(), errFake + }) + require.NoError(t, err) + sf, err := wrapObsMetrics(sm, receiverID, scraperID, set) + require.NoError(t, err) + _, err = sf.ScrapeMetrics(context.Background()) + require.ErrorIs(t, err, errFake) + + errorLogs := observedLogs.FilterLevelExact(zap.ErrorLevel).All() + require.Len(t, errorLogs, 1) + assert.Equal(t, "Error scraping metrics", errorLogs[0].Message) + assert.Equal(t, scraperID.String(), errorLogs[0].ContextMap()["scraper"]) + assert.Equal(t, errFake.Error(), 
errorLogs[0].ContextMap()["error"]) +} + func checkScraperMetrics(t *testing.T, tt *componenttest.Telemetry, receiver, scraper component.ID, scrapedMetricPoints, erroredMetricPoints int64) { metadatatest.AssertEqualScraperScrapedMetricPoints(t, tt, []metricdata.DataPoint[int64]{ diff --git a/scraper/scraperhelper/xscraperhelper/obs_profiles.go b/scraper/scraperhelper/xscraperhelper/obs_profiles.go index 084600c24a9..0ca3619fd3a 100644 --- a/scraper/scraperhelper/xscraperhelper/obs_profiles.go +++ b/scraper/scraperhelper/xscraperhelper/obs_profiles.go @@ -52,7 +52,7 @@ func wrapObsProfiles(sc xscraper.Profiles, receiverID, scraperID component.ID, s numScrapedProfiles := 0 numErroredProfiles := 0 if err != nil { - set.Logger.Error("Error scraping profiles", zap.Error(err)) + set.Logger.Error("Error scraping profiles", zap.String("scraper", scraperID.String()), zap.Error(err)) var partialErr scrapererror.PartialScrapeError if errors.As(err, &partialErr) { numErroredProfiles = partialErr.Failed diff --git a/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go b/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go index bf90b3a875b..73e59fdc7c8 100644 --- a/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go +++ b/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go @@ -15,6 +15,8 @@ import ( "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/sdk/metric/metricdata" "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" + "go.uber.org/zap" + "go.uber.org/zap/zaptest/observer" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/component/componenttest" @@ -112,6 +114,30 @@ func TestCheckScraperProfiles(t *testing.T) { checkScraperProfiles(t, tel, receiverID, scraperID, 7, 0) } +func TestScrapeProfilesDataOp_LogsScraperID(t *testing.T) { + tel := componenttest.NewTelemetry() + t.Cleanup(func() { require.NoError(t, tel.Shutdown(context.Background())) }) + + core, observedLogs := observer.New(zap.ErrorLevel) + set := 
tel.NewTelemetrySettings() + set.Logger = zap.New(core) + + sm, err := xscraper.NewProfiles(func(context.Context) (pprofile.Profiles, error) { + return pprofile.NewProfiles(), errFake + }) + require.NoError(t, err) + sf, err := wrapObsProfiles(sm, receiverID, scraperID, set) + require.NoError(t, err) + _, err = sf.ScrapeProfiles(context.Background()) + require.ErrorIs(t, err, errFake) + + errorLogs := observedLogs.FilterLevelExact(zap.ErrorLevel).All() + require.Len(t, errorLogs, 1) + assert.Equal(t, "Error scraping profiles", errorLogs[0].Message) + assert.Equal(t, scraperID.String(), errorLogs[0].ContextMap()["scraper"]) + assert.Equal(t, errFake.Error(), errorLogs[0].ContextMap()["error"]) +} + func checkScraperProfiles(t *testing.T, tel *componenttest.Telemetry, receiver, scraper component.ID, scrapedProfileRecords, erroredProfileRecords int64) { metadatatest.AssertEqualScraperScrapedProfileRecords(t, tel, []metricdata.DataPoint[int64]{ From a084c1893b56e2558a3f69fc1c9e24e1fea275ed Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Wed, 21 Jan 2026 14:35:38 +0000 Subject: [PATCH 03/11] Add chlog for adding ScraperID to scraper error logs --- .chloggen/add-scraperID-to-error-logs.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .chloggen/add-scraperID-to-error-logs.yaml diff --git a/.chloggen/add-scraperID-to-error-logs.yaml b/.chloggen/add-scraperID-to-error-logs.yaml new file mode 100644 index 00000000000..08f26db9926 --- /dev/null +++ b/.chloggen/add-scraperID-to-error-logs.yaml @@ -0,0 +1,13 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: enhancement + +# The name of the component, or a single word describing the area of concern, (e.g. receiver/otlp) +component: scraper/scraperhelper + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). 
+note: ScraperID has been added to the error logs for metrics, logs, and profiler + +# One or more tracking issues or pull requests related to the change +issues: [https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/35814] From 28ed8da414d9da2c6afc0b8e73cd9dc7fdf6be62 Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Wed, 21 Jan 2026 14:38:24 +0000 Subject: [PATCH 04/11] Update chlog for ScraperID and use only the issue number --- .chloggen/add-scraperID-to-error-logs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.chloggen/add-scraperID-to-error-logs.yaml b/.chloggen/add-scraperID-to-error-logs.yaml index 08f26db9926..edf0214da00 100644 --- a/.chloggen/add-scraperID-to-error-logs.yaml +++ b/.chloggen/add-scraperID-to-error-logs.yaml @@ -10,4 +10,4 @@ component: scraper/scraperhelper note: ScraperID has been added to the error logs for metrics, logs, and profiler # One or more tracking issues or pull requests related to the change -issues: [https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/35814] +issues: [35814] From 0c831a99078e0db9660d0d0cd1804a70b64f6e95 Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Wed, 21 Jan 2026 14:58:54 +0000 Subject: [PATCH 05/11] Use pkg/scraper for chlog for ScraperID added to logs --- .chloggen/add-scraperID-to-error-logs.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.chloggen/add-scraperID-to-error-logs.yaml b/.chloggen/add-scraperID-to-error-logs.yaml index edf0214da00..d5050770244 100644 --- a/.chloggen/add-scraperID-to-error-logs.yaml +++ b/.chloggen/add-scraperID-to-error-logs.yaml @@ -4,10 +4,10 @@ change_type: enhancement # The name of the component, or a single word describing the area of concern, (e.g. receiver/otlp) -component: scraper/scraperhelper +component: pkg/scraperhelper # A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). 
-note: ScraperID has been added to the error logs for metrics, logs, and profiler +note: ScraperID has been added to the error logs for metrics, logs, and profiles # One or more tracking issues or pull requests related to the change issues: [35814] From 548912f8f9bce59fa56efb6132e9975c314eab30 Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Thu, 22 Jan 2026 09:31:35 +0000 Subject: [PATCH 06/11] Tag all scraper logs with scraperID via scraper Controller This allows us to inject the scraperID to all logs in the scrapers metrics, logs and profiles. We can make the change once and not have to do it in multiple places. Tests have also been updated --- scraper/scraperhelper/internal/controller/controller.go | 8 ++++++-- scraper/scraperhelper/obs_logs.go | 2 +- scraper/scraperhelper/obs_logs_test.go | 2 +- scraper/scraperhelper/obs_metrics.go | 2 +- scraper/scraperhelper/obs_metrics_test.go | 2 +- scraper/scraperhelper/xscraperhelper/obs_profiles.go | 2 +- scraper/scraperhelper/xscraperhelper/obs_profiles_test.go | 2 +- 7 files changed, 12 insertions(+), 8 deletions(-) diff --git a/scraper/scraperhelper/internal/controller/controller.go b/scraper/scraperhelper/internal/controller/controller.go index 3d00113c969..60f37cc9e2f 100644 --- a/scraper/scraperhelper/internal/controller/controller.go +++ b/scraper/scraperhelper/internal/controller/controller.go @@ -11,6 +11,7 @@ import ( "time" "go.uber.org/multierr" + "go.uber.org/zap" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/receiver" @@ -124,9 +125,12 @@ func (sc *Controller[T]) startScraping() { } func GetSettings(sType component.Type, rSet receiver.Settings) scraper.Settings { + id := component.NewID(sType) + telemetry := rSet.TelemetrySettings + telemetry.Logger = telemetry.Logger.With(zap.String("scraper", id.String())) return scraper.Settings{ - ID: component.NewID(sType), - TelemetrySettings: rSet.TelemetrySettings, + ID: id, + TelemetrySettings: telemetry, BuildInfo: rSet.BuildInfo, }
} diff --git a/scraper/scraperhelper/obs_logs.go b/scraper/scraperhelper/obs_logs.go index cfaee760d72..5e22e752258 100644 --- a/scraper/scraperhelper/obs_logs.go +++ b/scraper/scraperhelper/obs_logs.go @@ -50,7 +50,7 @@ func wrapObsLogs(sc scraper.Logs, receiverID, scraperID component.ID, set compon numScrapedLogs := 0 numErroredLogs := 0 if err != nil { - set.Logger.Error("Error scraping logs", zap.String("scraper", scraperID.String()), zap.Error(err)) + set.Logger.Error("Error scraping logs", zap.Error(err)) var partialErr scrapererror.PartialScrapeError if errors.As(err, &partialErr) { numErroredLogs = partialErr.Failed diff --git a/scraper/scraperhelper/obs_logs_test.go b/scraper/scraperhelper/obs_logs_test.go index 78996f952f8..aaac6be1133 100644 --- a/scraper/scraperhelper/obs_logs_test.go +++ b/scraper/scraperhelper/obs_logs_test.go @@ -105,7 +105,7 @@ func TestScrapeLogsDataOp_LogsScraperID(t *testing.T) { core, observedLogs := observer.New(zap.ErrorLevel) set := tel.NewTelemetrySettings() - set.Logger = zap.New(core) + set.Logger = zap.New(core).With(zap.String("scraper", scraperID.String())) sm, err := scraper.NewLogs(func(context.Context) (plog.Logs, error) { return plog.NewLogs(), errFake diff --git a/scraper/scraperhelper/obs_metrics.go b/scraper/scraperhelper/obs_metrics.go index b2168292915..066baec2694 100644 --- a/scraper/scraperhelper/obs_metrics.go +++ b/scraper/scraperhelper/obs_metrics.go @@ -58,7 +58,7 @@ func wrapObsMetrics(sc scraper.Metrics, receiverID, scraperID component.ID, set numScrapedMetrics := 0 numErroredMetrics := 0 if err != nil { - set.Logger.Error("Error scraping metrics", zap.String("scraper", scraperID.String()), zap.Error(err)) + set.Logger.Error("Error scraping metrics", zap.Error(err)) var partialErr scrapererror.PartialScrapeError if errors.As(err, &partialErr) { numErroredMetrics = partialErr.Failed diff --git a/scraper/scraperhelper/obs_metrics_test.go b/scraper/scraperhelper/obs_metrics_test.go index 
0a12cf08c96..db41af8eb30 100644 --- a/scraper/scraperhelper/obs_metrics_test.go +++ b/scraper/scraperhelper/obs_metrics_test.go @@ -118,7 +118,7 @@ func TestScrapeMetricsDataOp_LogsScraperID(t *testing.T) { core, observedLogs := observer.New(zap.ErrorLevel) set := tel.NewTelemetrySettings() - set.Logger = zap.New(core) + set.Logger = zap.New(core).With(zap.String("scraper", scraperID.String())) sm, err := scraper.NewMetrics(func(context.Context) (pmetric.Metrics, error) { return pmetric.NewMetrics(), errFake diff --git a/scraper/scraperhelper/xscraperhelper/obs_profiles.go b/scraper/scraperhelper/xscraperhelper/obs_profiles.go index 0ca3619fd3a..084600c24a9 100644 --- a/scraper/scraperhelper/xscraperhelper/obs_profiles.go +++ b/scraper/scraperhelper/xscraperhelper/obs_profiles.go @@ -52,7 +52,7 @@ func wrapObsProfiles(sc xscraper.Profiles, receiverID, scraperID component.ID, s numScrapedProfiles := 0 numErroredProfiles := 0 if err != nil { - set.Logger.Error("Error scraping profiles", zap.String("scraper", scraperID.String()), zap.Error(err)) + set.Logger.Error("Error scraping profiles", zap.Error(err)) var partialErr scrapererror.PartialScrapeError if errors.As(err, &partialErr) { numErroredProfiles = partialErr.Failed diff --git a/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go b/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go index 73e59fdc7c8..33b50f4c856 100644 --- a/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go +++ b/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go @@ -120,7 +120,7 @@ func TestScrapeProfilesDataOp_LogsScraperID(t *testing.T) { core, observedLogs := observer.New(zap.ErrorLevel) set := tel.NewTelemetrySettings() - set.Logger = zap.New(core) + set.Logger = zap.New(core).With(zap.String("scraper", scraperID.String())) sm, err := xscraper.NewProfiles(func(context.Context) (pprofile.Profiles, error) { return pprofile.NewProfiles(), errFake From 4911a2ff4cc5a192875e37696838cd77f9534fbb Mon Sep 17 00:00:00 
2001 From: Donal O'Sullivan Date: Thu, 22 Jan 2026 11:42:08 +0000 Subject: [PATCH 07/11] Update chlog and use the PR number as issue is in collector-contrib --- .chloggen/add-scraperID-to-error-logs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.chloggen/add-scraperID-to-error-logs.yaml b/.chloggen/add-scraperID-to-error-logs.yaml index d5050770244..cdc0aae0814 100644 --- a/.chloggen/add-scraperID-to-error-logs.yaml +++ b/.chloggen/add-scraperID-to-error-logs.yaml @@ -10,4 +10,4 @@ component: pkg/scraperhelper note: ScraperID has been added to the error logs for metrics, logs, and profiles # One or more tracking issues or pull requests related to the change -issues: [35814] +issues: [14461] From 734704d6f28054a63e141d11f90f0a0142ecf12f Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Thu, 22 Jan 2026 12:11:22 +0000 Subject: [PATCH 08/11] Test that GetSettings injects scraperID into logger Also a test has been added to test that logs outside of the scraper do not get the scraperID logged --- .../internal/controller/controller_test.go | 43 +++++++++++++++++++ scraper/scraperhelper/obs_logs_test.go | 14 ++++-- scraper/scraperhelper/obs_metrics_test.go | 14 ++++-- .../xscraperhelper/obs_profiles_test.go | 14 ++++-- 4 files changed, 76 insertions(+), 9 deletions(-) create mode 100644 scraper/scraperhelper/internal/controller/controller_test.go diff --git a/scraper/scraperhelper/internal/controller/controller_test.go b/scraper/scraperhelper/internal/controller/controller_test.go new file mode 100644 index 00000000000..926f5a234b1 --- /dev/null +++ b/scraper/scraperhelper/internal/controller/controller_test.go @@ -0,0 +1,43 @@ +package controller + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.uber.org/zap" + "go.uber.org/zap/zaptest/observer" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/receiver" +) + +var ( + receiverID = 
component.MustNewID("fakeReceiver") + scraperID = component.MustNewID("fakeScraper") +) + +func TestGetSettings_DoesNotMutateReceiverSettings(t *testing.T) { + core, observedLogs := observer.New(zap.DebugLevel) + originalLogger := zap.New(core) + rSet := receiver.Settings{ + ID: receiverID, + TelemetrySettings: component.TelemetrySettings{ + Logger: originalLogger, + }, + } + scraperSettings := GetSettings(scraperID.Type(), rSet) + rSet.Logger.Error("test log from receiver") + scraperSettings.Logger.Error("test log from scraper") + + allLogs := observedLogs.All() + require.Len(t, allLogs, 2) + + receiverLog := allLogs[0] + assert.Equal(t, "test log from receiver", receiverLog.Message) + assert.NotContains(t, receiverLog.ContextMap(), "scraper") + + scraperLog := allLogs[1] + assert.Equal(t, "test log from scraper", scraperLog.Message) + assert.Equal(t, scraperID.String(), scraperLog.ContextMap()["scraper"]) +} diff --git a/scraper/scraperhelper/obs_logs_test.go b/scraper/scraperhelper/obs_logs_test.go index aaac6be1133..2153580cb88 100644 --- a/scraper/scraperhelper/obs_logs_test.go +++ b/scraper/scraperhelper/obs_logs_test.go @@ -21,7 +21,9 @@ import ( "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/testdata" + "go.opentelemetry.io/collector/receiver" "go.opentelemetry.io/collector/scraper" + "go.opentelemetry.io/collector/scraper/scraperhelper/internal/controller" "go.opentelemetry.io/collector/scraper/scraperhelper/internal/metadatatest" ) @@ -104,14 +106,20 @@ func TestScrapeLogsDataOp_LogsScraperID(t *testing.T) { t.Cleanup(func() { require.NoError(t, tel.Shutdown(context.Background())) }) core, observedLogs := observer.New(zap.ErrorLevel) - set := tel.NewTelemetrySettings() - set.Logger = zap.New(core).With(zap.String("scraper", scraperID.String())) + telset := tel.NewTelemetrySettings() + telset.Logger = zap.New(core) + + rSet := receiver.Settings{ + ID: receiverID, + 
TelemetrySettings: telset, + } + set := controller.GetSettings(scraperID.Type(), rSet) sm, err := scraper.NewLogs(func(context.Context) (plog.Logs, error) { return plog.NewLogs(), errFake }) require.NoError(t, err) - sf, err := wrapObsLogs(sm, receiverID, scraperID, set) + sf, err := wrapObsLogs(sm, receiverID, scraperID, set.TelemetrySettings) require.NoError(t, err) _, err = sf.ScrapeLogs(context.Background()) require.ErrorIs(t, err, errFake) diff --git a/scraper/scraperhelper/obs_metrics_test.go b/scraper/scraperhelper/obs_metrics_test.go index db41af8eb30..14a0e961983 100644 --- a/scraper/scraperhelper/obs_metrics_test.go +++ b/scraper/scraperhelper/obs_metrics_test.go @@ -21,8 +21,10 @@ import ( "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/pmetric" "go.opentelemetry.io/collector/pdata/testdata" + "go.opentelemetry.io/collector/receiver" "go.opentelemetry.io/collector/scraper" "go.opentelemetry.io/collector/scraper/scrapererror" + "go.opentelemetry.io/collector/scraper/scraperhelper/internal/controller" "go.opentelemetry.io/collector/scraper/scraperhelper/internal/metadatatest" ) @@ -117,14 +119,20 @@ func TestScrapeMetricsDataOp_LogsScraperID(t *testing.T) { t.Cleanup(func() { require.NoError(t, tel.Shutdown(context.Background())) }) core, observedLogs := observer.New(zap.ErrorLevel) - set := tel.NewTelemetrySettings() - set.Logger = zap.New(core).With(zap.String("scraper", scraperID.String())) + telset := tel.NewTelemetrySettings() + telset.Logger = zap.New(core) + + rSet := receiver.Settings{ + ID: receiverID, + TelemetrySettings: telset, + } + set := controller.GetSettings(scraperID.Type(), rSet) sm, err := scraper.NewMetrics(func(context.Context) (pmetric.Metrics, error) { return pmetric.NewMetrics(), errFake }) require.NoError(t, err) - sf, err := wrapObsMetrics(sm, receiverID, scraperID, set) + sf, err := wrapObsMetrics(sm, receiverID, scraperID, set.TelemetrySettings) require.NoError(t, err) _, err = 
sf.ScrapeMetrics(context.Background()) require.ErrorIs(t, err, errFake) diff --git a/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go b/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go index 33b50f4c856..3a10da77665 100644 --- a/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go +++ b/scraper/scraperhelper/xscraperhelper/obs_profiles_test.go @@ -22,7 +22,9 @@ import ( "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/pprofile" "go.opentelemetry.io/collector/pdata/testdata" + "go.opentelemetry.io/collector/receiver" "go.opentelemetry.io/collector/scraper/scrapererror" + "go.opentelemetry.io/collector/scraper/scraperhelper/internal/controller" "go.opentelemetry.io/collector/scraper/scraperhelper/xscraperhelper/internal/metadatatest" "go.opentelemetry.io/collector/scraper/xscraper" ) @@ -119,14 +121,20 @@ func TestScrapeProfilesDataOp_LogsScraperID(t *testing.T) { t.Cleanup(func() { require.NoError(t, tel.Shutdown(context.Background())) }) core, observedLogs := observer.New(zap.ErrorLevel) - set := tel.NewTelemetrySettings() - set.Logger = zap.New(core).With(zap.String("scraper", scraperID.String())) + telset := tel.NewTelemetrySettings() + telset.Logger = zap.New(core) + + rSet := receiver.Settings{ + ID: receiverID, + TelemetrySettings: telset, + } + set := controller.GetSettings(scraperID.Type(), rSet) sm, err := xscraper.NewProfiles(func(context.Context) (pprofile.Profiles, error) { return pprofile.NewProfiles(), errFake }) require.NoError(t, err) - sf, err := wrapObsProfiles(sm, receiverID, scraperID, set) + sf, err := wrapObsProfiles(sm, receiverID, scraperID, set.TelemetrySettings) require.NoError(t, err) _, err = sf.ScrapeProfiles(context.Background()) require.ErrorIs(t, err, errFake) From d3776f5b6412a3da24af36f8c45a6ef3e7f5dbe1 Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Thu, 22 Jan 2026 12:15:15 +0000 Subject: [PATCH 09/11] Add license to top of controller test file --- 
scraper/scraperhelper/internal/controller/controller_test.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/scraper/scraperhelper/internal/controller/controller_test.go b/scraper/scraperhelper/internal/controller/controller_test.go index 926f5a234b1..9ad7310e588 100644 --- a/scraper/scraperhelper/internal/controller/controller_test.go +++ b/scraper/scraperhelper/internal/controller/controller_test.go @@ -1,4 +1,7 @@ -package controller +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package controller // import "go.opentelemetry.io/collector/scraper/scraperhelper/internal/controller" import ( "testing" From fe0df99d18686e373ab2f65abebc4bf625e95a1a Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Thu, 22 Jan 2026 13:48:05 +0000 Subject: [PATCH 10/11] Move controller_test for testing scraperID is not present in other logs --- scraper/scraperhelper/controller_test.go | 58 +++++++++++++++++++ .../internal/controller/controller_test.go | 46 --------------- 2 files changed, 58 insertions(+), 46 deletions(-) delete mode 100644 scraper/scraperhelper/internal/controller/controller_test.go diff --git a/scraper/scraperhelper/controller_test.go b/scraper/scraperhelper/controller_test.go index 1ec22dff8b2..8366dd677cd 100644 --- a/scraper/scraperhelper/controller_test.go +++ b/scraper/scraperhelper/controller_test.go @@ -16,6 +16,8 @@ import ( "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" sdktrace "go.opentelemetry.io/otel/sdk/trace" "go.uber.org/multierr" + "go.uber.org/zap" + "go.uber.org/zap/zaptest/observer" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/component/componenttest" @@ -763,3 +765,59 @@ func TestNewDefaultControllerConfig(t *testing.T) { intControllerConfig := controller.NewDefaultControllerConfig() require.Equal(t, intControllerConfig, controllerConfig) } + +func TestNewMetricsController_ScraperIDInErrorLogs(t *testing.T) { + t.Parallel() + + core, 
observedLogs := observer.New(zap.ErrorLevel) + tel := componenttest.NewTelemetry() + t.Cleanup(func() { require.NoError(t, tel.Shutdown(context.Background())) }) + telset := tel.NewTelemetrySettings() + telset.Logger = zap.New(core) + + receiverID := component.MustNewID("fakeReceiver") + scraperType := component.MustNewType("fakeScraper") + scrapeErr := errors.New("scrape error") + + scrapeCh := make(chan int, 1) + ts := &testScrape{ch: scrapeCh, err: scrapeErr} + scp, err := scraper.NewMetrics(ts.scrapeMetrics) + require.NoError(t, err) + + cfg := newTestNoDelaySettings() + tickerCh := make(chan time.Time) + + recv, err := NewMetricsController( + cfg, + receiver.Settings{ID: receiverID, TelemetrySettings: telset, BuildInfo: component.NewDefaultBuildInfo()}, + new(consumertest.MetricsSink), + AddMetricsScraper(scraperType, scp), + WithTickerChannel(tickerCh), + ) + require.NoError(t, err) + require.NoError(t, recv.Start(context.Background(), componenttest.NewNopHost())) + defer func() { require.NoError(t, recv.Shutdown(context.Background())) }() + + <-scrapeCh + + require.Eventually(t, func() bool { + return observedLogs.Len() >= 1 + }, time.Second, 10*time.Millisecond) + errorLogs := observedLogs.FilterLevelExact(zap.ErrorLevel).All() + require.Len(t, errorLogs, 1) + + assert.Equal(t, "Error scraping metrics", errorLogs[0].Message) + assert.Equal(t, scraperType.String(), errorLogs[0].ContextMap()["scraper"]) + assert.Equal(t, scrapeErr.Error(), errorLogs[0].ContextMap()["error"]) + + // Verify the original receiver telemetry settings logger was NOT mutated + // by logging something and checking it doesn't have the scraper field + telset.Logger.Error("test log from receiver") + + allLogs := observedLogs.FilterLevelExact(zap.ErrorLevel).All() + require.Len(t, allLogs, 2) + + receiverLog := allLogs[1] + assert.Equal(t, "test log from receiver", receiverLog.Message) + assert.NotContains(t, receiverLog.ContextMap(), "scraper") +} diff --git 
a/scraper/scraperhelper/internal/controller/controller_test.go b/scraper/scraperhelper/internal/controller/controller_test.go deleted file mode 100644 index 9ad7310e588..00000000000 --- a/scraper/scraperhelper/internal/controller/controller_test.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright The OpenTelemetry Authors -// SPDX-License-Identifier: Apache-2.0 - -package controller // import "go.opentelemetry.io/collector/scraper/scraperhelper/internal/controller" - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - - "go.opentelemetry.io/collector/component" - "go.opentelemetry.io/collector/receiver" -) - -var ( - receiverID = component.MustNewID("fakeReceiver") - scraperID = component.MustNewID("fakeScraper") -) - -func TestGetSettings_DoesNotMutateReceiverSettings(t *testing.T) { - core, observedLogs := observer.New(zap.DebugLevel) - originalLogger := zap.New(core) - rSet := receiver.Settings{ - ID: receiverID, - TelemetrySettings: component.TelemetrySettings{ - Logger: originalLogger, - }, - } - scraperSettings := GetSettings(scraperID.Type(), rSet) - rSet.Logger.Error("test log from receiver") - scraperSettings.Logger.Error("test log from scraper") - - allLogs := observedLogs.All() - require.Len(t, allLogs, 2) - - receiverLog := allLogs[0] - assert.Equal(t, "test log from receiver", receiverLog.Message) - assert.NotContains(t, receiverLog.ContextMap(), "scraper") - - scraperLog := allLogs[1] - assert.Equal(t, "test log from scraper", scraperLog.Message) - assert.Equal(t, scraperID.String(), scraperLog.ContextMap()["scraper"]) -} From abc7c74728ff6e6befc40a2e7a31ec8ebaaac0ff Mon Sep 17 00:00:00 2001 From: Donal O'Sullivan Date: Thu, 22 Jan 2026 16:27:11 +0000 Subject: [PATCH 11/11] Remove error from chlog --- .chloggen/add-scraperID-to-error-logs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.chloggen/add-scraperID-to-error-logs.yaml b/.chloggen/add-scraperID-to-error-logs.yaml index cdc0aae0814..02ff0cd292f 100644 --- a/.chloggen/add-scraperID-to-error-logs.yaml +++ b/.chloggen/add-scraperID-to-error-logs.yaml @@ -7,7 +7,7 @@ change_type: enhancement component: pkg/scraperhelper # A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). -note: ScraperID has been added to the error logs for metrics, logs, and profiles +note: ScraperID has been added to the logs for metrics, logs, and profiles # One or more tracking issues or pull requests related to the change issues: [14461]