Merge pull request grafana/phlare#465 from grafana/fixes/463
Fixes the scrape timeout validation.
cyriltovena authored Jan 11, 2023
2 parents 0c2465f + 32d0696 commit 9958fe8
Showing 2 changed files with 9 additions and 7 deletions.
14 changes: 8 additions & 6 deletions pkg/agent/config.go
@@ -122,16 +122,18 @@ func (c *ScrapeConfig) Validate() error {
 	if c.JobName == "" {
 		return fmt.Errorf("job_name is empty")
 	}
-	if c.ScrapeTimeout > c.ScrapeInterval {
-		return fmt.Errorf("scrape timeout must be larger or equal to inverval for: %v", c.JobName)
-	}
+	// Validate the scrape and timeout internal configuration. When /debug/pprof/profile scraping
+	// is enabled we need to make sure there is enough time to complete the scrape.
 	if c.ScrapeTimeout == 0 {
-		c.ScrapeTimeout = c.ScrapeInterval
+		c.ScrapeTimeout = c.ScrapeInterval + model.Duration(3*time.Second)
 	}
+	if c.ScrapeTimeout <= c.ScrapeInterval {
+		return fmt.Errorf("scrape timeout must be larger or equal to interval for: %v", c.JobName)
+	}
 
 	if cfg, ok := c.ProfilingConfig.PprofConfig[pprofProcessCPU]; ok {
-		if *cfg.Enabled && c.ScrapeTimeout < model.Duration(time.Second*2) {
-			return fmt.Errorf("%v scrape_timeout must be at least 2 seconds in %v", pprofProcessCPU, c.JobName)
+		if *cfg.Enabled && c.ScrapeInterval < model.Duration(time.Second*2) {
+			return fmt.Errorf("%v scrape_interval must be at least 2 seconds in %v", pprofProcessCPU, c.JobName)
 		}
 	}
 	return nil
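
The net effect of the config.go change: the previously inverted check (which rejected any timeout larger than the interval) is gone; an unset scrape timeout now defaults to the scrape interval plus three seconds, an explicitly configured timeout must exceed the interval, and the 2-second minimum now applies to the interval rather than the timeout. A minimal standalone sketch of the new defaulting-and-ordering rule, reusing prometheus/common's model.Duration as in the diff; the validate helper and the sample values are illustrative, not part of the repository:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

// validate mirrors the defaulting and ordering check introduced in
// pkg/agent/config.go; it is a sketch, not the phlare API.
func validate(interval, timeout model.Duration) (model.Duration, error) {
	if timeout == 0 {
		// Unset timeout: default to the interval plus 3s of headroom.
		timeout = interval + model.Duration(3*time.Second)
	}
	if timeout <= interval {
		return 0, fmt.Errorf("scrape timeout must be larger than the interval (timeout=%v interval=%v)", timeout, interval)
	}
	return timeout, nil
}

func main() {
	interval := model.Duration(15 * time.Second)

	// Unset timeout defaults to interval + 3s.
	t, _ := validate(interval, 0)
	fmt.Println(t) // 18s

	// A timeout equal to the interval is now rejected.
	_, err := validate(interval, interval)
	fmt.Println(err)
}

With a 15s interval, an unset timeout becomes 18s, which leaves room for a /debug/pprof/profile request that blocks for close to a full interval.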
2 changes: 1 addition & 1 deletion pkg/agent/profiles.go
@@ -228,7 +228,7 @@ func (tg *TargetGroup) targetsFromGroup(group *targetgroup.Group) ([]*Target, []
 		}
 
 		if pcfg, found := tg.config.ProfilingConfig.PprofConfig[profType]; found && pcfg.Delta {
-			params.Add("seconds", strconv.Itoa(int(time.Duration(tg.config.ScrapeTimeout)/time.Second)-1))
+			params.Add("seconds", strconv.Itoa(int(time.Duration(tg.config.ScrapeInterval)/time.Second)-1))
 		}
 		targets = append(targets, &Target{
 			Target: scrape.NewTarget(lbls, origLabels, params),
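
In profiles.go, the pprof "seconds" query parameter for delta profile types is now derived from the scrape interval instead of the timeout, so the requested profile duration stays one second under the interval while the (now longer) timeout acts purely as a deadline. A small sketch of that arithmetic, with an illustrative 15-second interval; the values and variable names are examples, not repository defaults:

package main

import (
	"fmt"
	"net/url"
	"strconv"
	"time"
)

func main() {
	scrapeInterval := 15 * time.Second
	scrapeTimeout := scrapeInterval + 3*time.Second // default applied by Validate()

	params := url.Values{}
	// Same expression as profiles.go, now based on the interval: 15s -> "seconds=14".
	params.Add("seconds", strconv.Itoa(int(scrapeInterval/time.Second)-1))

	fmt.Println(params.Encode())                // seconds=14
	fmt.Println(scrapeTimeout > scrapeInterval) // true: the 18s deadline covers the 14s profile
}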
