Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

pyroscope.scrape: Adjust handling of scrape_timeout #6252

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 8 additions & 8 deletions component/pyroscope/scrape/scrape.go
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ func NewDefaultArguments() Arguments {
Scheme: "http",
HTTPClientConfig: component_config.DefaultHTTPClientConfig,
ScrapeInterval: 15 * time.Second,
ScrapeTimeout: 15*time.Second + (3 * time.Second),
ScrapeTimeout: 10 * time.Second,
ProfilingConfig: DefaultProfilingConfig,
}
}
Expand All @@ -205,16 +205,16 @@ func (arg *Arguments) SetToDefault() {

// Validate implements river.Validator.
func (arg *Arguments) Validate() error {
if arg.ScrapeTimeout <= 0 {
if arg.ScrapeTimeout.Seconds() <= 0 {
return fmt.Errorf("scrape_timeout must be greater than 0")
}
if arg.ScrapeTimeout <= arg.ScrapeInterval {
return fmt.Errorf("scrape_timeout must be greater than scrape_interval")
}

if cfg, ok := arg.ProfilingConfig.ProcessCPU, true; ok {
if cfg.Enabled && arg.ScrapeTimeout < time.Second*2 {
return fmt.Errorf("%v scrape_timeout must be at least 2 seconds", pprofProcessCPU)
// ScrapeInterval must be at least 2 seconds, because if
// ProfilingTarget.Delta is true the ScrapeInterval - 1s is propagated in
// the `seconds` parameter and it must be >= 1.
for _, target := range arg.ProfilingConfig.AllTargets() {
if target.Enabled && target.Delta && arg.ScrapeInterval.Seconds() < 2 {
return fmt.Errorf("scrape_interval must be at least 2 seconds when using delta profiling")
}
}

Expand Down
7 changes: 7 additions & 0 deletions component/pyroscope/scrape/scrape_loop.go
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,13 @@ type scrapeLoop struct {
}

func newScrapeLoop(t *Target, scrapeClient *http.Client, appendable pyroscope.Appendable, interval, timeout time.Duration, logger log.Logger) *scrapeLoop {
// if the URL parameters have a seconds parameter, then the collection will
// take at least scrape_interval - 1 second, as the HTTP request will block
// until the profile is collected.
if t.Params().Has("seconds") {
timeout += interval - time.Second
}

return &scrapeLoop{
Target: t,
logger: logger,
Expand Down
7 changes: 6 additions & 1 deletion component/pyroscope/scrape/scrape_loop_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,12 @@ func TestScrapePool(t *testing.T) {
args.ScrapeInterval = 2 * time.Second
p.reload(args)
for _, ta := range p.activeTargets {
require.Equal(t, 1*time.Second, ta.timeout)
if paramsSeconds := ta.params.Get("seconds"); paramsSeconds != "" {
// if the param is set timeout includes interval - 1s
require.Equal(t, 2*time.Second, ta.timeout)
} else {
require.Equal(t, 1*time.Second, ta.timeout)
}
require.Equal(t, 2*time.Second, ta.interval)
}
}
Expand Down
24 changes: 17 additions & 7 deletions component/pyroscope/scrape/scrape_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -142,30 +142,40 @@ func TestUnmarshalConfig(t *testing.T) {
return r
},
},
"invalid cpu timeout": {
"invalid cpu scrape_interval": {
in: `
targets = []
forward_to = null
scrape_timeout = "1s"
scrape_interval = "0.5s"
`,
expectedErr: "process_cpu scrape_timeout must be at least 2 seconds",
expectedErr: "scrape_interval must be at least 2 seconds when using delta profiling",
},
"invalid timeout/interval": {
"allow short scrape_intervals without delta": {
in: `
targets = []
forward_to = null
scrape_timeout = "4s"
scrape_interval = "5s"
scrape_interval = "0.5s"
profiling_config {
profile.process_cpu {
enabled = false
}
}
`,
expectedErr: "scrape_timeout must be greater than scrape_interval",
expected: func() Arguments {
r := NewDefaultArguments()
r.Targets = make([]discovery.Target, 0)
r.ScrapeInterval = 500 * time.Millisecond
r.ProfilingConfig.ProcessCPU.Enabled = false
return r
},
},
"invalid HTTPClientConfig": {
in: `
targets = []
forward_to = null
scrape_timeout = "5s"
scrape_interval = "1s"
scrape_interval = "2s"
bearer_token = "token"
bearer_token_file = "/path/to/file.token"
`,
Expand Down
Loading