diff --git a/CHANGELOG.md b/CHANGELOG.md index 39f33d3ee221..f673e2f2dd18 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -74,6 +74,9 @@ v0.37.2 (2023-10-16) config not being included in the river output. (@erikbaranowski) - Fix issue with default values in `discovery.nomad`. (@marctc) + +- Fix issue where two scrape options of the `prometheus.scrape` component were not + applied after the agent was reloaded. (@hainenber) ### Enhancements diff --git a/component/prometheus/scrape/scrape.go b/component/prometheus/scrape/scrape.go index 1501f0ace870..0d5502c0c446 100644 --- a/component/prometheus/scrape/scrape.go +++ b/component/prometheus/scrape/scrape.go @@ -140,28 +140,12 @@ var ( // New creates a new prometheus.scrape component. func New(o component.Options, args Arguments) (*Component, error) { - data, err := o.GetServiceData(http.ServiceName) - if err != nil { - return nil, fmt.Errorf("failed to get information about HTTP server: %w", err) - } - httpData := data.(http.Data) - - data, err = o.GetServiceData(cluster.ServiceName) + data, err := o.GetServiceData(cluster.ServiceName) if err != nil { return nil, fmt.Errorf("failed to get information about cluster: %w", err) } clusterData := data.(cluster.Cluster) - flowAppendable := prometheus.NewFanout(args.ForwardTo, o.ID, o.Registerer) - scrapeOptions := &scrape.Options{ - ExtraMetrics: args.ExtraMetrics, - HTTPClientOptions: []config_util.HTTPClientOption{ - config_util.WithDialContextFunc(httpData.DialFunc), - }, - EnableProtobufNegotiation: args.EnableProtobufNegotiation, - } - scraper := scrape.NewManager(scrapeOptions, o.Logger, flowAppendable) - targetsGauge := client_prometheus.NewGauge(client_prometheus.GaugeOpts{ Name: "agent_prometheus_scrape_targets_gauge", Help: "Number of targets this component is configured to scrape"}) @@ -174,11 +158,19 @@ func New(o component.Options, args Arguments) (*Component, error) { opts: o, cluster: clusterData, reloadTargets: make(chan struct{}, 1), - scraper: scraper, - appendable: 
flowAppendable, targetsGauge: targetsGauge, } + // Update created component with prometheus.Fanout and prometheus.ScrapeManager + data, err = o.GetServiceData(http.ServiceName) + if err != nil { + return nil, fmt.Errorf("failed to get information about HTTP server: %w", err) + } + httpData := data.(http.Data) + flowAppendable, scraper := c.createPromScrapeResources(httpData, args) + c.appendable = flowAppendable + c.scraper = scraper + // Call to Update() to set the receivers and targets once at the start. if err := c.Update(args); err != nil { return nil, err @@ -236,13 +228,22 @@ func (c *Component) Update(args component.Arguments) error { defer c.mut.Unlock() c.args = newArgs + // Update scraper with scrape options + data, err := c.opts.GetServiceData(http.ServiceName) + if err != nil { + return fmt.Errorf("failed to get information about HTTP server: %w", err) + } + newFlowAppendables, newScraper := c.createPromScrapeResources(data.(http.Data), newArgs) + c.appendable = newFlowAppendables + c.scraper = newScraper + c.appendable.UpdateChildren(newArgs.ForwardTo) sc := getPromScrapeConfigs(c.opts.ID, newArgs) - err := c.scraper.ApplyConfig(&config.Config{ + + if err := c.scraper.ApplyConfig(&config.Config{ ScrapeConfigs: []*config.ScrapeConfig{sc}, - }) - if err != nil { + }); err != nil { return fmt.Errorf("error applying scrape configs: %w", err) } level.Debug(c.opts.Logger).Log("msg", "scrape config was updated") @@ -377,6 +378,21 @@ func (c *Component) componentTargetsToProm(jobName string, tgs []discovery.Targe return map[string][]*targetgroup.Group{jobName: {promGroup}} } +// createPromScrapeResources creates the prometheus.Fanout appendable and the prometheus scrape.Manager used by this component. +func (c *Component) createPromScrapeResources(httpData http.Data, args Arguments) (*prometheus.Fanout, *scrape.Manager) { + flowAppendable := prometheus.NewFanout(args.ForwardTo, c.opts.ID, c.opts.Registerer) + scrapeOptions := &scrape.Options{ + ExtraMetrics: args.ExtraMetrics, + HTTPClientOptions: []config_util.HTTPClientOption{ 
+ config_util.WithDialContextFunc(httpData.DialFunc), + }, + EnableProtobufNegotiation: args.EnableProtobufNegotiation, + } + scraper := scrape.NewManager(scrapeOptions, c.opts.Logger, flowAppendable) + + return flowAppendable, scraper +} + func convertLabelSet(tg discovery.Target) model.LabelSet { lset := make(model.LabelSet, len(tg)) for k, v := range tg {