// Copyright 2013 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package scrape

import (
	"net/http"
	"strconv"
	"testing"
	"time"

	"github.com/prometheus/common/model"
	"github.com/stretchr/testify/require"
	yaml "gopkg.in/yaml.v2"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/pkg/relabel"
)

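// TestPopulateLabels checks populateLabels against a table of discovered
// targets: regular option population, label overrides, port defaulting by
// scheme, relabelling, and error cases (missing address, invalid UTF-8,
// invalid or zero durations, timeout greater than interval). The call under
// test, as used in the loop below, is:
//
//	res, orig, err := populateLabels(target, scrapeConfig)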
func TestPopulateLabels(t *testing.T) {
	cases := []struct {
		in      labels.Labels
		cfg     *config.ScrapeConfig
		res     labels.Labels
		resOrig labels.Labels
		err     string
	}{
		// Regular population of scrape config options.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:1000",
				"custom":           "value",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.InstanceLabel:       "1.2.3.4:1000",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "value",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				"custom":                  "value",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Pre-define/overwrite scrape config labels.
		// Leave out the port and expect it to be defaulted from the scheme
		// (80 for http, 443 for https).
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:80",
				model.InstanceLabel:       "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
		},
		// Provide instance label. HTTPS port default for IPv6.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:  "[::1]",
				model.InstanceLabel: "custom-instance",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "[::1]:443",
				model.InstanceLabel:       "custom-instance",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "[::1]",
				model.InstanceLabel:       "custom-instance",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Address label missing.
		{
			in: labels.FromStrings("custom", "value"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "no address",
		},
		// Address label missing, but added in relabelling.
		{
			in: labels.FromStrings("custom", "host:1234"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
				RelabelConfigs: []*relabel.Config{
					{
						Action:       relabel.Replace,
						Regex:        relabel.MustNewRegexp("(.*)"),
						SourceLabels: model.LabelNames{"custom"},
						Replacement:  "${1}",
						TargetLabel:  string(model.AddressLabel),
					},
				},
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "host:1234",
				model.InstanceLabel:       "host:1234",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
		},
		// Invalid UTF-8 in label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:1000",
				"custom":           "\xbd",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "invalid label value for \"custom\": \"\\xbd\"",
		},
		// Invalid duration in interval label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "2notseconds",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
		},
		// Invalid duration in timeout label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:       "1.2.3.4:1000",
				model.ScrapeTimeoutLabel: "2notseconds",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
		},
		// 0 interval in interval label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "0s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "scrape interval cannot be 0",
		},
		// 0 duration in timeout label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:       "1.2.3.4:1000",
				model.ScrapeTimeoutLabel: "0s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "scrape timeout cannot be 0",
		},
		// Timeout greater than interval.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
		},
	}
	for _, c := range cases {
		in := c.in.Copy()

		res, orig, err := populateLabels(c.in, c.cfg)
		if c.err != "" {
			require.EqualError(t, err, c.err)
		} else {
			require.NoError(t, err)
		}
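		// populateLabels must not mutate its input label set.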
		require.Equal(t, c.in, in)
		require.Equal(t, c.res, res)
		require.Equal(t, c.resOrig, orig)
	}
}

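// loadConfiguration strictly parses the given YAML text into a config.Config,
// failing the test on any parse error.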
func loadConfiguration(t *testing.T, c string) *config.Config {
	t.Helper()

	cfg := &config.Config{}
	if err := yaml.UnmarshalStrict([]byte(c), cfg); err != nil {
		t.Fatalf("Unable to load YAML config: %s", err)
	}
	return cfg
}

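// noopLoop returns a scrape loop whose start and stop functions do nothing,
// for use wherever a real loop is not needed.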
func noopLoop() loop {
	return &testLoop{
		startFunc: func(interval, timeout time.Duration, errc chan<- error) {},
		stopFunc:  func() {},
	}
}

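// TestManagerApplyConfig verifies that ApplyConfig restarts scrape loops only
// when the effective configuration actually changes: neither a failed reload
// nor re-applying an identical config may trigger one.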
func TestManagerApplyConfig(t *testing.T) {
	// Valid initial configuration.
	cfgText1 := `
scrape_configs:
 - job_name: job1
   static_configs:
   - targets: ["foo:9090"]
`
	// Invalid configuration.
	cfgText2 := `
scrape_configs:
 - job_name: job1
   scheme: https
   static_configs:
   - targets: ["foo:9090"]
   tls_config:
     ca_file: /not/existing/ca/file
`
	// Valid configuration.
	cfgText3 := `
scrape_configs:
 - job_name: job1
   scheme: https
   static_configs:
   - targets: ["foo:9090"]
`
	var (
		cfg1 = loadConfiguration(t, cfgText1)
		cfg2 = loadConfiguration(t, cfgText2)
		cfg3 = loadConfiguration(t, cfgText3)

		ch = make(chan struct{}, 1)
	)

	opts := Options{}
	scrapeManager := NewManager(&opts, nil, nil)
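	// newLoop signals on ch whenever the scrape pool creates a new loop,
	// which only happens when a reload is triggered.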
	newLoop := func(scrapeLoopOptions) loop {
		ch <- struct{}{}
		return noopLoop()
	}
	sp := &scrapePool{
		appendable:    &nopAppendable{},
		activeTargets: map[uint64]*Target{},
		loops: map[uint64]loop{
			1: noopLoop(),
		},
		newLoop: newLoop,
		logger:  nil,
		config:  cfg1.ScrapeConfigs[0],
		client:  http.DefaultClient,
	}
	scrapeManager.scrapePools = map[string]*scrapePool{
		"job1": sp,
	}

	// Apply the initial configuration.
	if err := scrapeManager.ApplyConfig(cfg1); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}

	// Apply a configuration for which the reload fails.
	if err := scrapeManager.ApplyConfig(cfg2); err == nil {
		t.Fatalf("expected an error but got none")
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}

	// Apply a configuration for which the reload succeeds.
	if err := scrapeManager.ApplyConfig(cfg3); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
	default:
		t.Fatal("reload didn't happen")
	}

	// Re-applying the same configuration shouldn't trigger a reload.
	if err := scrapeManager.ApplyConfig(cfg3); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}
}

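// TestManagerTargetsUpdates verifies that the manager's Run loop consumes
// target group updates without blocking and triggers a scrape loops reload.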
func TestManagerTargetsUpdates(t *testing.T) {
	opts := Options{}
	m := NewManager(&opts, nil, nil)

	ts := make(chan map[string][]*targetgroup.Group)
	go m.Run(ts)
	defer m.Stop()

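	// Push ten rounds of target updates; Run must drain each send promptly.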
	tgSent := make(map[string][]*targetgroup.Group)
	for x := 0; x < 10; x++ {
		tgSent[strconv.Itoa(x)] = []*targetgroup.Group{
			{
				Source: strconv.Itoa(x),
			},
		}

		select {
		case ts <- tgSent:
		case <-time.After(10 * time.Millisecond):
			t.Error("Scrape manager's channel remained blocked beyond the set threshold.")
		}
	}

	m.mtxScrape.Lock()
	tsetActual := m.targetSets
	m.mtxScrape.Unlock()

	// Make sure all updates have been received.
	require.Equal(t, tgSent, tsetActual)

	select {
	case <-m.triggerReload:
	default:
		t.Error("No scrape loop reload was triggered after the targets update.")
	}
}

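// TestSetJitter verifies that setJitterSeed derives a non-zero seed from the
// external labels, and that different label sets yield different seeds.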
func TestSetJitter(t *testing.T) {
	getConfig := func(prometheus string) *config.Config {
		cfgText := `
global:
 external_labels:
   prometheus: '` + prometheus + `'
`

		cfg := &config.Config{}
		if err := yaml.UnmarshalStrict([]byte(cfgText), cfg); err != nil {
			t.Fatalf("Unable to load YAML config: %s", err)
		}

		return cfg
	}

	opts := Options{}
	scrapeManager := NewManager(&opts, nil, nil)

	// Load the first config.
	cfg1 := getConfig("ha1")
	if err := scrapeManager.setJitterSeed(cfg1.GlobalConfig.ExternalLabels); err != nil {
		t.Error(err)
	}
	jitter1 := scrapeManager.jitterSeed

	if jitter1 == 0 {
		t.Error("Expected the jitter seed to be a non-zero hash")
	}

	// Load the second config.
	cfg2 := getConfig("ha2")
	if err := scrapeManager.setJitterSeed(cfg2.GlobalConfig.ExternalLabels); err != nil {
		t.Error(err)
	}
	jitter2 := scrapeManager.jitterSeed

	if jitter1 == jitter2 {
		t.Error("Jitter should not be the same for different sets of external labels")
	}
}