| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | // Copyright 2016 The Prometheus Authors
 | 
					
						
							|  |  |  | // Licensed under the Apache License, Version 2.0 (the "License");
 | 
					
						
							|  |  |  | // you may not use this file except in compliance with the License.
 | 
					
						
							|  |  |  | // You may obtain a copy of the License at
 | 
					
						
							|  |  |  | //
 | 
					
						
							|  |  |  | // http://www.apache.org/licenses/LICENSE-2.0
 | 
					
						
							|  |  |  | //
 | 
					
						
							|  |  |  | // Unless required by applicable law or agreed to in writing, software
 | 
					
						
							|  |  |  | // distributed under the License is distributed on an "AS IS" BASIS,
 | 
					
						
							|  |  |  | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 | 
					
						
							|  |  |  | // See the License for the specific language governing permissions and
 | 
					
						
							|  |  |  | // limitations under the License.
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-02-01 17:55:07 +08:00
										 |  |  | package scrape | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | import ( | 
					
						
							| 
									
										
										
										
											2017-01-16 00:33:07 +08:00
										 |  |  | 	"bytes" | 
					
						
							| 
									
										
										
										
											2021-05-16 10:19:22 +08:00
										 |  |  | 	"compress/gzip" | 
					
						
							| 
									
										
										
										
											2017-10-25 12:21:42 +08:00
										 |  |  | 	"context" | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	"fmt" | 
					
						
							| 
									
										
										
										
											2017-01-16 00:33:07 +08:00
										 |  |  | 	"io" | 
					
						
							|  |  |  | 	"io/ioutil" | 
					
						
							| 
									
										
										
										
											2017-04-14 01:07:23 +08:00
										 |  |  | 	"math" | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 	"net/http" | 
					
						
							|  |  |  | 	"net/http/httptest" | 
					
						
							|  |  |  | 	"net/url" | 
					
						
							|  |  |  | 	"strings" | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	"sync" | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	"testing" | 
					
						
							|  |  |  | 	"time" | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-06-12 00:17:59 +08:00
										 |  |  | 	"github.com/go-kit/log" | 
					
						
							| 
									
										
										
										
											2019-03-26 07:01:12 +08:00
										 |  |  | 	"github.com/pkg/errors" | 
					
						
							| 
									
										
										
										
											2019-02-13 21:24:22 +08:00
										 |  |  | 	dto "github.com/prometheus/client_model/go" | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 	config_util "github.com/prometheus/common/config" | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	"github.com/prometheus/common/model" | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	"github.com/stretchr/testify/require" | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-23 18:56:09 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/config" | 
					
						
							| 
									
										
										
										
											2018-01-04 22:13:31 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/discovery/targetgroup" | 
					
						
							| 
									
										
										
										
											2021-03-16 17:47:45 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/pkg/exemplar" | 
					
						
							| 
									
										
										
										
											2016-12-29 16:27:30 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/pkg/labels" | 
					
						
							| 
									
										
										
										
											2019-02-13 21:24:22 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/pkg/relabel" | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/pkg/textparse" | 
					
						
							| 
									
										
										
										
											2017-04-11 22:42:17 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/pkg/timestamp" | 
					
						
							| 
									
										
										
										
											2017-04-13 17:33:08 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/pkg/value" | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/storage" | 
					
						
							| 
									
										
										
										
											2019-08-09 09:35:39 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/util/teststorage" | 
					
						
							| 
									
										
										
										
											2017-09-15 17:08:51 +08:00
										 |  |  | 	"github.com/prometheus/prometheus/util/testutil" | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | ) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-27 16:38:08 +08:00
// TestMain wraps the package's test run so that goroutine leaks can be
// detected after all tests complete (tolerating known benign leaks).
func TestMain(m *testing.M) {
	testutil.TolerantVerifyLeak(m)
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | func TestNewScrapePool(t *testing.T) { | 
					
						
							|  |  |  | 	var ( | 
					
						
							| 
									
										
										
										
											2019-02-13 21:24:22 +08:00
										 |  |  | 		app   = &nopAppendable{} | 
					
						
							|  |  |  | 		cfg   = &config.ScrapeConfig{} | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		sp, _ = newScrapePool(cfg, app, 0, nil, false) | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-12-31 04:35:35 +08:00
										 |  |  | 	if a, ok := sp.appendable.(*nopAppendable); !ok || a != app { | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 		t.Fatalf("Wrong sample appender") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if sp.config != cfg { | 
					
						
							|  |  |  | 		t.Fatalf("Wrong scrape config") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if sp.newLoop == nil { | 
					
						
							|  |  |  | 		t.Fatalf("newLoop function not initialized") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-01-04 22:13:31 +08:00
// TestDroppedTargetsList checks that targets removed by a drop relabel rule
// are recorded on the scrape pool, that repeated Syncs do not duplicate
// them, and that their discovered labels are reported as expected.
func TestDroppedTargetsList(t *testing.T) {
	var (
		app = &nopAppendable{}
		cfg = &config.ScrapeConfig{
			JobName:        "dropMe",
			ScrapeInterval: model.Duration(1),
			// Drop every target whose job label matches "dropMe" — i.e. all of them.
			RelabelConfigs: []*relabel.Config{
				{
					Action:       relabel.Drop,
					Regex:        relabel.MustNewRegexp("dropMe"),
					SourceLabels: model.LabelNames{"job"},
				},
			},
		}
		tgs = []*targetgroup.Group{
			{
				Targets: []model.LabelSet{
					{model.AddressLabel: "127.0.0.1:9090"},
				},
			},
		}
		sp, _                  = newScrapePool(cfg, app, 0, nil, false)
		expectedLabelSetString = "{__address__=\"127.0.0.1:9090\", __scrape_interval__=\"0s\", __scrape_timeout__=\"0s\", job=\"dropMe\"}"
		expectedLength         = 1
	)
	// Sync twice: the dropped-targets list must not grow on the second pass.
	sp.Sync(tgs)
	sp.Sync(tgs)
	if len(sp.droppedTargets) != expectedLength {
		t.Fatalf("Length of dropped targets exceeded expected length, expected %v, got %v", expectedLength, len(sp.droppedTargets))
	}
	if sp.droppedTargets[0].DiscoveredLabels().String() != expectedLabelSetString {
		t.Fatalf("Got %v, expected %v", sp.droppedTargets[0].DiscoveredLabels().String(), expectedLabelSetString)
	}
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-02-07 18:29:27 +08:00
										 |  |  | // TestDiscoveredLabelsUpdate checks that DiscoveredLabels are updated
 | 
					
						
							|  |  |  | // even when new labels don't affect the target `hash`.
 | 
					
						
							|  |  |  | func TestDiscoveredLabelsUpdate(t *testing.T) { | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	sp := &scrapePool{} | 
					
						
							|  |  |  | 	// These are used when syncing so need this to avoid a panic.
 | 
					
						
							|  |  |  | 	sp.config = &config.ScrapeConfig{ | 
					
						
							|  |  |  | 		ScrapeInterval: model.Duration(1), | 
					
						
							|  |  |  | 		ScrapeTimeout:  model.Duration(1), | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2018-09-26 17:20:56 +08:00
										 |  |  | 	sp.activeTargets = make(map[uint64]*Target) | 
					
						
							| 
									
										
										
										
											2018-02-07 18:29:27 +08:00
										 |  |  | 	t1 := &Target{ | 
					
						
							|  |  |  | 		discoveredLabels: labels.Labels{ | 
					
						
							|  |  |  | 			labels.Label{ | 
					
						
							|  |  |  | 				Name:  "label", | 
					
						
							|  |  |  | 				Value: "name", | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2018-09-26 17:20:56 +08:00
										 |  |  | 	sp.activeTargets[t1.hash()] = t1 | 
					
						
							| 
									
										
										
										
											2018-02-07 18:29:27 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	t2 := &Target{ | 
					
						
							|  |  |  | 		discoveredLabels: labels.Labels{ | 
					
						
							|  |  |  | 			labels.Label{ | 
					
						
							|  |  |  | 				Name:  "labelNew", | 
					
						
							|  |  |  | 				Value: "nameNew", | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	sp.sync([]*Target{t2}) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, t2.DiscoveredLabels(), sp.activeTargets[t1.hash()].DiscoveredLabels()) | 
					
						
							| 
									
										
										
										
											2018-02-07 18:29:27 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
// testLoop is a stub loop implementation that lets tests hook into start
// and stop, inject a forced error, and record the configured timings.
type testLoop struct {
	startFunc    func(interval, timeout time.Duration, errc chan<- error) // invoked by run
	stopFunc     func()                                                   // invoked by stop
	forcedErr    error
	forcedErrMtx sync.Mutex // guards forcedErr
	runOnce      bool       // set on first run; a second run panics
	interval     time.Duration
	timeout      time.Duration
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
// run starts the loop by invoking startFunc with the configured interval
// and timeout. It panics if called more than once.
func (l *testLoop) run(errc chan<- error) {
	if l.runOnce {
		panic("loop must be started only once")
	}
	l.runOnce = true
	l.startFunc(l.interval, l.timeout, errc)
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-03-21 00:43:26 +08:00
// disableEndOfRunStalenessMarkers is a no-op; it exists only so testLoop
// can be used where a loop is expected.
func (l *testLoop) disableEndOfRunStalenessMarkers() {
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
// setForcedError records the error the loop should report. The mutex
// allows concurrent use with getForcedError.
func (l *testLoop) setForcedError(err error) {
	l.forcedErrMtx.Lock()
	defer l.forcedErrMtx.Unlock()
	l.forcedErr = err
}
					
						
							|  |  |  | 
 | 
					
						
// getForcedError returns the error previously set via setForcedError,
// under the same mutex.
func (l *testLoop) getForcedError() error {
	l.forcedErrMtx.Lock()
	defer l.forcedErrMtx.Unlock()
	return l.forcedErr
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
// stop delegates to the test-provided stopFunc.
func (l *testLoop) stop() {
	l.stopFunc()
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
// getCache always returns nil; testLoop keeps no scrape cache.
func (l *testLoop) getCache() *scrapeCache {
	return nil
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
// TestScrapePoolStop verifies that scrapePool.stop() invokes stop() on
// every loop, waits for all of them to return before returning itself,
// and clears the pool's targets and loops afterwards.
func TestScrapePoolStop(t *testing.T) {
	sp := &scrapePool{
		activeTargets: map[uint64]*Target{},
		loops:         map[uint64]loop{},
		cancel:        func() {},
		client:        http.DefaultClient,
	}
	var mtx sync.Mutex
	stopped := map[uint64]bool{}
	numTargets := 20

	// Stopping the scrape pool must call stop() on all scrape loops,
	// clean them and the respective targets up. It must wait until each loop's
	// stop function returned before returning itself.

	for i := 0; i < numTargets; i++ {
		t := &Target{
			labels: labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i)),
		}
		l := &testLoop{}
		l.stopFunc = func() {
			// NOTE(review): i is the shared loop variable (pre-Go 1.22
			// semantics), so by the time stopFunc runs it has presumably
			// advanced to numTargets for every closure; the elapsed-time
			// check below appears to rely on that — confirm before
			// changing the capture.
			time.Sleep(time.Duration(i*20) * time.Millisecond)

			mtx.Lock()
			stopped[t.hash()] = true
			mtx.Unlock()
		}

		sp.activeTargets[t.hash()] = t
		sp.loops[t.hash()] = l
	}

	done := make(chan struct{})
	stopTime := time.Now()

	// Run stop() in the background so we can bound its runtime.
	go func() {
		sp.stop()
		close(done)
	}()

	select {
	case <-time.After(5 * time.Second):
		t.Fatalf("scrapeLoop.stop() did not return as expected")
	case <-done:
		// This should have taken at least as long as the last target slept.
		if time.Since(stopTime) < time.Duration(numTargets*20)*time.Millisecond {
			t.Fatalf("scrapeLoop.stop() exited before all targets stopped")
		}
	}

	mtx.Lock()
	require.Equal(t, numTargets, len(stopped), "Unexpected number of stopped loops")
	mtx.Unlock()

	require.Equal(t, 0, len(sp.activeTargets), "Targets were not cleared on stopping: %d left", len(sp.activeTargets))
	require.Equal(t, 0, len(sp.loops), "Loops were not cleared on stopping: %d left", len(sp.loops))
}
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | func TestScrapePoolReload(t *testing.T) { | 
					
						
							|  |  |  | 	var mtx sync.Mutex | 
					
						
							|  |  |  | 	numTargets := 20 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-29 02:56:18 +08:00
										 |  |  | 	stopped := map[uint64]bool{} | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	reloadCfg := &config.ScrapeConfig{ | 
					
						
							|  |  |  | 		ScrapeInterval: model.Duration(3 * time.Second), | 
					
						
							|  |  |  | 		ScrapeTimeout:  model.Duration(2 * time.Second), | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2016-09-15 11:23:28 +08:00
										 |  |  | 	// On starting to run, new loops created on reload check whether their preceding
 | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	// equivalents have been stopped.
 | 
					
						
							| 
									
										
										
										
											2019-03-12 18:26:18 +08:00
										 |  |  | 	newLoop := func(opts scrapeLoopOptions) loop { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		l := &testLoop{interval: time.Duration(reloadCfg.ScrapeInterval), timeout: time.Duration(reloadCfg.ScrapeTimeout)} | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 		l.startFunc = func(interval, timeout time.Duration, errc chan<- error) { | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 			require.Equal(t, 3*time.Second, interval, "Unexpected scrape interval") | 
					
						
							|  |  |  | 			require.Equal(t, 2*time.Second, timeout, "Unexpected scrape timeout") | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 			mtx.Lock() | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 			targetScraper := opts.scraper.(*targetScraper) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 			require.True(t, stopped[targetScraper.hash()], "Scrape loop for %v not stopped yet", targetScraper) | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 			mtx.Unlock() | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 		return l | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	sp := &scrapePool{ | 
					
						
							| 
									
										
										
										
											2018-09-26 17:20:56 +08:00
										 |  |  | 		appendable:    &nopAppendable{}, | 
					
						
							|  |  |  | 		activeTargets: map[uint64]*Target{}, | 
					
						
							|  |  |  | 		loops:         map[uint64]loop{}, | 
					
						
							|  |  |  | 		newLoop:       newLoop, | 
					
						
							|  |  |  | 		logger:        nil, | 
					
						
							| 
									
										
										
										
											2019-04-10 20:20:00 +08:00
										 |  |  | 		client:        http.DefaultClient, | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	// Reloading a scrape pool with a new scrape configuration must stop all scrape
 | 
					
						
							| 
									
										
										
										
											2016-09-15 11:23:28 +08:00
										 |  |  | 	// loops and start new ones. A new loop must not be started before the preceding
 | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	// one terminated.
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	for i := 0; i < numTargets; i++ { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		labels := labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i)) | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 		t := &Target{ | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 			labels:           labels, | 
					
						
							|  |  |  | 			discoveredLabels: labels, | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 		} | 
					
						
							|  |  |  | 		l := &testLoop{} | 
					
						
							|  |  |  | 		l.stopFunc = func() { | 
					
						
							|  |  |  | 			time.Sleep(time.Duration(i*20) * time.Millisecond) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 			mtx.Lock() | 
					
						
							| 
									
										
										
										
											2016-02-29 02:56:18 +08:00
										 |  |  | 			stopped[t.hash()] = true | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 			mtx.Unlock() | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-09-26 17:20:56 +08:00
										 |  |  | 		sp.activeTargets[t.hash()] = t | 
					
						
							| 
									
										
										
										
											2016-02-29 02:56:18 +08:00
										 |  |  | 		sp.loops[t.hash()] = l | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 	done := make(chan struct{}) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-29 02:56:18 +08:00
										 |  |  | 	beforeTargets := map[uint64]*Target{} | 
					
						
							| 
									
										
										
										
											2018-09-26 17:20:56 +08:00
										 |  |  | 	for h, t := range sp.activeTargets { | 
					
						
							| 
									
										
										
										
											2016-02-29 02:56:18 +08:00
										 |  |  | 		beforeTargets[h] = t | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	reloadTime := time.Now() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							|  |  |  | 		sp.reload(reloadCfg) | 
					
						
							|  |  |  | 		close(done) | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("scrapeLoop.reload() did not return as expected") | 
					
						
							|  |  |  | 	case <-done: | 
					
						
							|  |  |  | 		// This should have taken at least as long as the last target slept.
 | 
					
						
							|  |  |  | 		if time.Since(reloadTime) < time.Duration(numTargets*20)*time.Millisecond { | 
					
						
							|  |  |  | 			t.Fatalf("scrapeLoop.stop() exited before all targets stopped") | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	mtx.Lock() | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, numTargets, len(stopped), "Unexpected number of stopped loops") | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | 	mtx.Unlock() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, sp.activeTargets, beforeTargets, "Reloading affected target states unexpectedly") | 
					
						
							|  |  |  | 	require.Equal(t, numTargets, len(sp.loops), "Unexpected number of stopped loops after reload") | 
					
						
							| 
									
										
										
										
											2016-02-28 16:51:02 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | func TestScrapePoolTargetLimit(t *testing.T) { | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	var wg sync.WaitGroup | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	// On starting to run, new loops created on reload check whether their preceding
 | 
					
						
							|  |  |  | 	// equivalents have been stopped.
 | 
					
						
							|  |  |  | 	newLoop := func(opts scrapeLoopOptions) loop { | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 		wg.Add(1) | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 		l := &testLoop{ | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 			startFunc: func(interval, timeout time.Duration, errc chan<- error) { | 
					
						
							|  |  |  | 				wg.Done() | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 			stopFunc: func() {}, | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 		} | 
					
						
							|  |  |  | 		return l | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	sp := &scrapePool{ | 
					
						
							|  |  |  | 		appendable:    &nopAppendable{}, | 
					
						
							|  |  |  | 		activeTargets: map[uint64]*Target{}, | 
					
						
							|  |  |  | 		loops:         map[uint64]loop{}, | 
					
						
							|  |  |  | 		newLoop:       newLoop, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		logger:        log.NewNopLogger(), | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 		client:        http.DefaultClient, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	var tgs = []*targetgroup.Group{} | 
					
						
							|  |  |  | 	for i := 0; i < 50; i++ { | 
					
						
							|  |  |  | 		tgs = append(tgs, | 
					
						
							|  |  |  | 			&targetgroup.Group{ | 
					
						
							|  |  |  | 				Targets: []model.LabelSet{ | 
					
						
							|  |  |  | 					{model.AddressLabel: model.LabelValue(fmt.Sprintf("127.0.0.1:%d", 9090+i))}, | 
					
						
							|  |  |  | 				}, | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	var limit uint | 
					
						
							|  |  |  | 	reloadWithLimit := func(l uint) { | 
					
						
							|  |  |  | 		limit = l | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 		require.NoError(t, sp.reload(&config.ScrapeConfig{ | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 			ScrapeInterval: model.Duration(3 * time.Second), | 
					
						
							|  |  |  | 			ScrapeTimeout:  model.Duration(2 * time.Second), | 
					
						
							|  |  |  | 			TargetLimit:    l, | 
					
						
							|  |  |  | 		})) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	var targets int | 
					
						
							|  |  |  | 	loadTargets := func(n int) { | 
					
						
							|  |  |  | 		targets = n | 
					
						
							|  |  |  | 		sp.Sync(tgs[:n]) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning := func() { | 
					
						
							|  |  |  | 		wg.Wait() | 
					
						
							|  |  |  | 		for _, l := range sp.loops { | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 			require.True(t, l.(*testLoop).runOnce, "loop should be running") | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 		} | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	validateErrorMessage := func(shouldErr bool) { | 
					
						
							|  |  |  | 		for _, l := range sp.loops { | 
					
						
							|  |  |  | 			lerr := l.(*testLoop).getForcedError() | 
					
						
							|  |  |  | 			if shouldErr { | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 				require.NotNil(t, lerr, "error was expected for %d targets with a limit of %d", targets, limit) | 
					
						
							|  |  |  | 				require.Equal(t, fmt.Sprintf("target_limit exceeded (number of targets: %d, limit: %d)", targets, limit), lerr.Error()) | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 			} else { | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 				require.Equal(t, nil, lerr) | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 			} | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	reloadWithLimit(0) | 
					
						
							|  |  |  | 	loadTargets(50) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning() | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	// Simulate an initial config with a limit.
 | 
					
						
							|  |  |  | 	sp.config.TargetLimit = 30 | 
					
						
							|  |  |  | 	limit = 30 | 
					
						
							|  |  |  | 	loadTargets(50) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning() | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	validateErrorMessage(true) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	reloadWithLimit(50) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning() | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	validateErrorMessage(false) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	reloadWithLimit(40) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning() | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	validateErrorMessage(true) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	loadTargets(30) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning() | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	validateErrorMessage(false) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	loadTargets(40) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning() | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	validateErrorMessage(false) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	loadTargets(41) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning() | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	validateErrorMessage(true) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-07-27 18:48:55 +08:00
										 |  |  | 	reloadWithLimit(0) | 
					
						
							|  |  |  | 	validateIsRunning() | 
					
						
							|  |  |  | 	validateErrorMessage(false) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	reloadWithLimit(51) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	validateIsRunning() | 
					
						
							|  |  |  | 	validateErrorMessage(false) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	tgs = append(tgs, | 
					
						
							|  |  |  | 		&targetgroup.Group{ | 
					
						
							|  |  |  | 			Targets: []model.LabelSet{ | 
					
						
							|  |  |  | 				{model.AddressLabel: model.LabelValue("127.0.0.1:1090")}, | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		&targetgroup.Group{ | 
					
						
							|  |  |  | 			Targets: []model.LabelSet{ | 
					
						
							|  |  |  | 				{model.AddressLabel: model.LabelValue("127.0.0.1:1090")}, | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	sp.Sync(tgs) | 
					
						
							|  |  |  | 	validateIsRunning() | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	validateErrorMessage(false) | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | func TestScrapePoolAppender(t *testing.T) { | 
					
						
							|  |  |  | 	cfg := &config.ScrapeConfig{} | 
					
						
							| 
									
										
										
										
											2016-12-31 04:35:35 +08:00
										 |  |  | 	app := &nopAppendable{} | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 	sp, _ := newScrapePool(cfg, app, 0, nil, false) | 
					
						
							| 
									
										
										
										
											2016-02-23 18:56:09 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-03-12 18:26:18 +08:00
										 |  |  | 	loop := sp.newLoop(scrapeLoopOptions{ | 
					
						
							|  |  |  | 		target: &Target{}, | 
					
						
							|  |  |  | 	}) | 
					
						
							| 
									
										
										
										
											2018-04-12 22:54:53 +08:00
										 |  |  | 	appl, ok := loop.(*scrapeLoop) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "Expected scrapeLoop but got %T", loop) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	wrapped := appl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2016-02-23 18:56:09 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	tl, ok := wrapped.(*timeLimitAppender) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "Expected timeLimitAppender but got %T", wrapped) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	_, ok = tl.Appender.(nopAppender) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "Expected base appender but got %T", tl.Appender) | 
					
						
							| 
									
										
										
										
											2016-02-23 18:56:09 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-03-12 18:26:18 +08:00
										 |  |  | 	loop = sp.newLoop(scrapeLoopOptions{ | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		target:      &Target{}, | 
					
						
							|  |  |  | 		sampleLimit: 100, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:26:18 +08:00
										 |  |  | 	}) | 
					
						
							| 
									
										
										
										
											2018-04-12 22:54:53 +08:00
										 |  |  | 	appl, ok = loop.(*scrapeLoop) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "Expected scrapeLoop but got %T", loop) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	wrapped = appl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	sl, ok := wrapped.(*limitAppender) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "Expected limitAppender but got %T", wrapped) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	tl, ok = sl.Appender.(*timeLimitAppender) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "Expected limitAppender but got %T", sl.Appender) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	_, ok = tl.Appender.(nopAppender) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "Expected base appender but got %T", tl.Appender) | 
					
						
							| 
									
										
										
										
											2016-02-23 18:56:09 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-04-13 20:21:41 +08:00
										 |  |  | func TestScrapePoolRaces(t *testing.T) { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 	interval, _ := model.ParseDuration("1s") | 
					
						
							|  |  |  | 	timeout, _ := model.ParseDuration("500ms") | 
					
						
							| 
									
										
										
										
											2018-04-13 20:21:41 +08:00
										 |  |  | 	newConfig := func() *config.ScrapeConfig { | 
					
						
							|  |  |  | 		return &config.ScrapeConfig{ScrapeInterval: interval, ScrapeTimeout: timeout} | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 	sp, _ := newScrapePool(newConfig(), &nopAppendable{}, 0, nil, false) | 
					
						
							| 
									
										
										
										
											2018-04-13 20:21:41 +08:00
										 |  |  | 	tgts := []*targetgroup.Group{ | 
					
						
							| 
									
										
										
										
											2019-01-17 06:28:08 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2018-04-13 20:21:41 +08:00
										 |  |  | 			Targets: []model.LabelSet{ | 
					
						
							| 
									
										
										
										
											2019-01-17 06:28:08 +08:00
										 |  |  | 				{model.AddressLabel: "127.0.0.1:9090"}, | 
					
						
							|  |  |  | 				{model.AddressLabel: "127.0.0.2:9090"}, | 
					
						
							|  |  |  | 				{model.AddressLabel: "127.0.0.3:9090"}, | 
					
						
							|  |  |  | 				{model.AddressLabel: "127.0.0.4:9090"}, | 
					
						
							|  |  |  | 				{model.AddressLabel: "127.0.0.5:9090"}, | 
					
						
							|  |  |  | 				{model.AddressLabel: "127.0.0.6:9090"}, | 
					
						
							|  |  |  | 				{model.AddressLabel: "127.0.0.7:9090"}, | 
					
						
							|  |  |  | 				{model.AddressLabel: "127.0.0.8:9090"}, | 
					
						
							| 
									
										
										
										
											2018-04-13 20:21:41 +08:00
										 |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-09-26 17:20:56 +08:00
										 |  |  | 	sp.Sync(tgts) | 
					
						
							|  |  |  | 	active := sp.ActiveTargets() | 
					
						
							|  |  |  | 	dropped := sp.DroppedTargets() | 
					
						
							| 
									
										
										
										
											2018-04-13 20:21:41 +08:00
										 |  |  | 	expectedActive, expectedDropped := len(tgts[0].Targets), 0 | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, expectedActive, len(active), "Invalid number of active targets") | 
					
						
							|  |  |  | 	require.Equal(t, expectedDropped, len(dropped), "Invalid number of dropped targets") | 
					
						
							| 
									
										
										
										
											2018-04-13 20:21:41 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	for i := 0; i < 20; i++ { | 
					
						
							|  |  |  | 		time.Sleep(time.Duration(10 * time.Millisecond)) | 
					
						
							|  |  |  | 		sp.reload(newConfig()) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	sp.stop() | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | func TestScrapePoolScrapeLoopsStarted(t *testing.T) { | 
					
						
							|  |  |  | 	var wg sync.WaitGroup | 
					
						
							|  |  |  | 	newLoop := func(opts scrapeLoopOptions) loop { | 
					
						
							|  |  |  | 		wg.Add(1) | 
					
						
							|  |  |  | 		l := &testLoop{ | 
					
						
							|  |  |  | 			startFunc: func(interval, timeout time.Duration, errc chan<- error) { | 
					
						
							|  |  |  | 				wg.Done() | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 			stopFunc: func() {}, | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 		return l | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	sp := &scrapePool{ | 
					
						
							|  |  |  | 		appendable:    &nopAppendable{}, | 
					
						
							|  |  |  | 		activeTargets: map[uint64]*Target{}, | 
					
						
							|  |  |  | 		loops:         map[uint64]loop{}, | 
					
						
							|  |  |  | 		newLoop:       newLoop, | 
					
						
							|  |  |  | 		logger:        nil, | 
					
						
							|  |  |  | 		client:        http.DefaultClient, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	tgs := []*targetgroup.Group{ | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			Targets: []model.LabelSet{ | 
					
						
							|  |  |  | 				{model.AddressLabel: model.LabelValue("127.0.0.1:9090")}, | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			Targets: []model.LabelSet{ | 
					
						
							|  |  |  | 				{model.AddressLabel: model.LabelValue("127.0.0.1:9090")}, | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, sp.reload(&config.ScrapeConfig{ | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 		ScrapeInterval: model.Duration(3 * time.Second), | 
					
						
							|  |  |  | 		ScrapeTimeout:  model.Duration(2 * time.Second), | 
					
						
							|  |  |  | 	})) | 
					
						
							|  |  |  | 	sp.Sync(tgs) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, 1, len(sp.loops)) | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	wg.Wait() | 
					
						
							|  |  |  | 	for _, l := range sp.loops { | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 		require.True(t, l.(*testLoop).runOnce, "loop should be running") | 
					
						
							| 
									
										
										
										
											2020-10-01 02:21:32 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
// TestScrapeLoopStopBeforeRun verifies the stop/run handshake of a scrape
// loop: stop() called before run() must block until run() has been invoked
// and exited, and run() on an already-stopped loop must return immediately
// without ever invoking the scraper.
func TestScrapeLoopStopBeforeRun(t *testing.T) {
	scraper := &testScraper{}

	// Positional options below (nils, zeroes, true/false flags) follow the
	// newScrapeLoop signature; only the scraper and the nop mutators matter
	// for this test — everything else is left at an inert value.
	sl := newScrapeLoop(context.Background(),
		scraper,
		nil, nil,
		nopMutator,
		nopMutator,
		nil, nil, 0,
		true,
		0,
		nil,
		1,
		0,
		false,
	)

	// The scrape pool synchronizes on stopping scrape loops. However, new scrape
	// loops are started asynchronously. Thus it's possible, that a loop is stopped
	// again before having started properly.
	// Stopping not-yet-started loops must block until the run method was called and exited.
	// The run method must exit immediately.

	// Stop the loop before run() has ever been called.
	stopDone := make(chan struct{})
	go func() {
		sl.stop()
		close(stopDone)
	}()

	// stop() must still be blocked after 500ms because run() has not run yet.
	select {
	case <-stopDone:
		t.Fatalf("Stopping terminated before run exited successfully")
	case <-time.After(500 * time.Millisecond):
	}

	// Running the scrape loop must exit before calling the scraper even once.
	scraper.scrapeFunc = func(context.Context, io.Writer) error {
		t.Fatalf("scraper was called for terminated scrape loop")
		return nil
	}

	// run() on the stopped loop must return promptly (well within 1s).
	runDone := make(chan struct{})
	go func() {
		sl.run(nil)
		close(runDone)
	}()

	select {
	case <-runDone:
	case <-time.After(1 * time.Second):
		t.Fatalf("Running terminated scrape loop did not exit")
	}

	// Once run() has exited, the pending stop() must unblock as well.
	select {
	case <-stopDone:
	case <-time.After(1 * time.Second):
		t.Fatalf("Stopping did not terminate after running exited")
	}
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | func nopMutator(l labels.Labels) labels.Labels { return l } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
										 |  |  | func TestScrapeLoopStop(t *testing.T) { | 
					
						
							|  |  |  | 	var ( | 
					
						
							| 
									
										
										
										
											2020-02-13 15:53:07 +08:00
										 |  |  | 		signal   = make(chan struct{}, 1) | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 		appender = &collectResultAppender{} | 
					
						
							|  |  |  | 		scraper  = &testScraper{} | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		app      = func(ctx context.Context) storage.Appender { return appender } | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	sl := newScrapeLoop(context.Background(), | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		app, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		10*time.Millisecond, | 
					
						
							|  |  |  | 		time.Hour, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	// Terminate loop after 2 scrapes.
 | 
					
						
							|  |  |  | 	numScrapes := 0 | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error { | 
					
						
							| 
									
										
										
										
											2017-05-26 16:44:48 +08:00
										 |  |  | 		numScrapes++ | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
										 |  |  | 		if numScrapes == 2 { | 
					
						
							| 
									
										
										
										
											2017-05-26 16:44:48 +08:00
										 |  |  | 			go sl.stop() | 
					
						
							| 
									
										
										
										
											2020-08-07 21:58:16 +08:00
										 |  |  | 			<-sl.ctx.Done() | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
										 |  |  | 		} | 
					
						
							|  |  |  | 		w.Write([]byte("metric_a 42\n")) | 
					
						
							| 
									
										
										
										
											2020-08-07 21:58:16 +08:00
										 |  |  | 		return ctx.Err() | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		sl.run(nil) | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
										 |  |  | 		signal <- struct{}{} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-signal: | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Scrape wasn't stopped.") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 	// We expected 1 actual sample for each scrape plus 5 for report samples.
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	// At least 2 scrapes were made, plus the final stale markers.
 | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 	if len(appender.result) < 6*3 || len(appender.result)%6 != 0 { | 
					
						
							|  |  |  | 		t.Fatalf("Expected at least 3 scrapes with 6 samples each, got %d samples", len(appender.result)) | 
					
						
							| 
									
										
										
										
											2017-05-11 21:43:43 +08:00
										 |  |  | 	} | 
					
						
							| 
									
										
										
										
											2018-04-08 17:51:54 +08:00
										 |  |  | 	// All samples in a scrape must have the same timestamp.
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	var ts int64 | 
					
						
							|  |  |  | 	for i, s := range appender.result { | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 		if i%6 == 0 { | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 			ts = s.t | 
					
						
							|  |  |  | 		} else if s.t != ts { | 
					
						
							|  |  |  | 			t.Fatalf("Unexpected multiple timestamps within single scrape") | 
					
						
							|  |  |  | 		} | 
					
						
							| 
									
										
										
										
											2017-05-11 21:43:43 +08:00
										 |  |  | 	} | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	// All samples from the last scrape must be stale markers.
 | 
					
						
							|  |  |  | 	for _, s := range appender.result[len(appender.result)-5:] { | 
					
						
							|  |  |  | 		if !value.IsStaleNaN(s.v) { | 
					
						
							|  |  |  | 			t.Fatalf("Appended last sample not as expected. Wanted: stale NaN Got: %x", math.Float64bits(s.v)) | 
					
						
							|  |  |  | 		} | 
					
						
							| 
									
										
										
										
											2017-05-11 21:43:43 +08:00
										 |  |  | 	} | 
					
						
							| 
									
										
										
										
											2017-05-10 23:59:02 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | func TestScrapeLoopRun(t *testing.T) { | 
					
						
							|  |  |  | 	var ( | 
					
						
							| 
									
										
										
										
											2020-02-13 15:53:07 +08:00
										 |  |  | 		signal = make(chan struct{}, 1) | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 		errc   = make(chan error) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 		scraper = &testScraper{} | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		app     = func(ctx context.Context) storage.Appender { return &nopAppender{} } | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	sl := newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		app, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		time.Second, | 
					
						
							|  |  |  | 		time.Hour, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	) | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	// The loop must terminate during the initial offset if the context
 | 
					
						
							|  |  |  | 	// is canceled.
 | 
					
						
							|  |  |  | 	scraper.offsetDur = time.Hour | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		sl.run(errc) | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 		signal <- struct{}{} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	// Wait to make sure we are actually waiting on the offset.
 | 
					
						
							|  |  |  | 	time.Sleep(1 * time.Second) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	cancel() | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-signal: | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							| 
									
										
										
										
											2019-10-10 17:47:30 +08:00
										 |  |  | 		t.Fatalf("Cancellation during initial offset failed") | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	case err := <-errc: | 
					
						
							|  |  |  | 		t.Fatalf("Unexpected error: %s", err) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-10-10 17:47:30 +08:00
										 |  |  | 	// The provided timeout must cause cancellation of the context passed down to the
 | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	// scraper. The scraper has to respect the context.
 | 
					
						
							|  |  |  | 	scraper.offsetDur = 0 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	block := make(chan struct{}) | 
					
						
							| 
									
										
										
										
											2017-01-16 00:33:07 +08:00
										 |  |  | 	scraper.scrapeFunc = func(ctx context.Context, _ io.Writer) error { | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 		select { | 
					
						
							|  |  |  | 		case <-block: | 
					
						
							|  |  |  | 		case <-ctx.Done(): | 
					
						
							| 
									
										
										
										
											2017-01-16 00:33:07 +08:00
										 |  |  | 			return ctx.Err() | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 		} | 
					
						
							| 
									
										
										
										
											2017-01-16 00:33:07 +08:00
										 |  |  | 		return nil | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel = context.WithCancel(context.Background()) | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	sl = newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		app, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		time.Second, | 
					
						
							|  |  |  | 		100*time.Millisecond, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	) | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		sl.run(errc) | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 		signal <- struct{}{} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case err := <-errc: | 
					
						
							|  |  |  | 		if err != context.DeadlineExceeded { | 
					
						
							|  |  |  | 			t.Fatalf("Expected timeout error but got: %s", err) | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 	case <-time.After(3 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Expected timeout error but got none") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	// We already caught the timeout error and are certainly in the loop.
 | 
					
						
							|  |  |  | 	// Let the scrapes returns immediately to cause no further timeout errors
 | 
					
						
							|  |  |  | 	// and check whether canceling the parent context terminates the loop.
 | 
					
						
							|  |  |  | 	close(block) | 
					
						
							|  |  |  | 	cancel() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-signal: | 
					
						
							|  |  |  | 		// Loop terminated as expected.
 | 
					
						
							|  |  |  | 	case err := <-errc: | 
					
						
							|  |  |  | 		t.Fatalf("Unexpected error: %s", err) | 
					
						
							|  |  |  | 	case <-time.After(3 * time.Second): | 
					
						
							| 
									
										
										
										
											2019-10-10 17:47:30 +08:00
										 |  |  | 		t.Fatalf("Loop did not terminate on context cancellation") | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | func TestScrapeLoopForcedErr(t *testing.T) { | 
					
						
							|  |  |  | 	var ( | 
					
						
							|  |  |  | 		signal = make(chan struct{}, 1) | 
					
						
							|  |  |  | 		errc   = make(chan error) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 		scraper = &testScraper{} | 
					
						
							| 
									
										
										
										
											2020-07-31 15:33:56 +08:00
										 |  |  | 		app     = func(ctx context.Context) storage.Appender { return &nopAppender{} } | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							|  |  |  | 	sl := newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		app, | 
					
						
							|  |  |  | 		nil, | 
					
						
							|  |  |  | 		0, | 
					
						
							|  |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		time.Second, | 
					
						
							|  |  |  | 		time.Hour, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	forcedErr := fmt.Errorf("forced err") | 
					
						
							|  |  |  | 	sl.setForcedError(forcedErr) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	scraper.scrapeFunc = func(context.Context, io.Writer) error { | 
					
						
							|  |  |  | 		t.Fatalf("should not be scraped") | 
					
						
							|  |  |  | 		return nil | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		sl.run(errc) | 
					
						
							| 
									
										
										
										
											2020-07-30 20:20:24 +08:00
										 |  |  | 		signal <- struct{}{} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case err := <-errc: | 
					
						
							|  |  |  | 		if err != forcedErr { | 
					
						
							|  |  |  | 			t.Fatalf("Expected forced error but got: %s", err) | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 	case <-time.After(3 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Expected forced error but got none") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	cancel() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-signal: | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Scrape not stopped") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | func TestScrapeLoopMetadata(t *testing.T) { | 
					
						
							|  |  |  | 	var ( | 
					
						
							|  |  |  | 		signal  = make(chan struct{}) | 
					
						
							|  |  |  | 		scraper = &testScraper{} | 
					
						
							|  |  |  | 		cache   = newScrapeCache() | 
					
						
							|  |  |  | 	) | 
					
						
							|  |  |  | 	defer close(signal) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							|  |  |  | 	sl := newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		func(ctx context.Context) storage.Appender { return nopAppender{} }, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		cache, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		0, | 
					
						
							|  |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 	defer cancel() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 	slApp := sl.appender(ctx) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	total, _, _, err := sl.append(slApp, []byte(`# TYPE test_metric counter | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | # HELP test_metric some help text | 
					
						
							| 
									
										
										
										
											2018-10-06 00:11:16 +08:00
										 |  |  | # UNIT test_metric metric | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | test_metric 1 | 
					
						
							|  |  |  | # TYPE test_metric_no_help gauge | 
					
						
							| 
									
										
										
										
											2018-10-06 00:11:16 +08:00
										 |  |  | # HELP test_metric_no_type other help text | 
					
						
							|  |  |  | # EOF`), "application/openmetrics-text", time.Now()) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							|  |  |  | 	require.Equal(t, 1, total) | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-12-04 23:18:27 +08:00
										 |  |  | 	md, ok := cache.GetMetadata("test_metric") | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "expected metadata to be present") | 
					
						
							|  |  |  | 	require.Equal(t, textparse.MetricTypeCounter, md.Type, "unexpected metric type") | 
					
						
							|  |  |  | 	require.Equal(t, "some help text", md.Help) | 
					
						
							|  |  |  | 	require.Equal(t, "metric", md.Unit) | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-12-04 23:18:27 +08:00
										 |  |  | 	md, ok = cache.GetMetadata("test_metric_no_help") | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "expected metadata to be present") | 
					
						
							|  |  |  | 	require.Equal(t, textparse.MetricTypeGauge, md.Type, "unexpected metric type") | 
					
						
							|  |  |  | 	require.Equal(t, "", md.Help) | 
					
						
							|  |  |  | 	require.Equal(t, "", md.Unit) | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-12-04 23:18:27 +08:00
										 |  |  | 	md, ok = cache.GetMetadata("test_metric_no_type") | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.True(t, ok, "expected metadata to be present") | 
					
						
							|  |  |  | 	require.Equal(t, textparse.MetricTypeUnknown, md.Type, "unexpected metric type") | 
					
						
							|  |  |  | 	require.Equal(t, "other help text", md.Help) | 
					
						
							|  |  |  | 	require.Equal(t, "", md.Unit) | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-09-08 16:09:21 +08:00
										 |  |  | func simpleTestScrapeLoop(t testing.TB) (context.Context, *scrapeLoop) { | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 	// Need a full storage for correct Add/AddFast semantics.
 | 
					
						
							| 
									
										
										
										
											2019-08-09 09:35:39 +08:00
										 |  |  | 	s := teststorage.New(t) | 
					
						
							| 
									
										
										
										
											2021-09-08 16:09:21 +08:00
										 |  |  | 	t.Cleanup(func() { s.Close() }) | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							|  |  |  | 	sl := newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		&testScraper{}, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							| 
									
										
										
										
											2020-07-17 18:30:22 +08:00
										 |  |  | 		s.Appender, | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 		nil, | 
					
						
							|  |  |  | 		0, | 
					
						
							|  |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		0, | 
					
						
							|  |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 	) | 
					
						
							| 
									
										
										
										
											2021-09-08 16:09:21 +08:00
										 |  |  | 	t.Cleanup(func() { cancel() }) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	return ctx, sl | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | func TestScrapeLoopSeriesAdded(t *testing.T) { | 
					
						
							|  |  |  | 	ctx, sl := simpleTestScrapeLoop(t) | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 	slApp := sl.appender(ctx) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	total, added, seriesAdded, err := sl.append(slApp, []byte("test_metric 1\n"), "", time.Time{}) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							|  |  |  | 	require.Equal(t, 1, total) | 
					
						
							|  |  |  | 	require.Equal(t, 1, added) | 
					
						
							|  |  |  | 	require.Equal(t, 1, seriesAdded) | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 	slApp = sl.appender(ctx) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	total, added, seriesAdded, err = sl.append(slApp, []byte("test_metric 1\n"), "", time.Time{}) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							|  |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.Equal(t, 1, total) | 
					
						
							|  |  |  | 	require.Equal(t, 1, added) | 
					
						
							|  |  |  | 	require.Equal(t, 0, seriesAdded) | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-09-08 16:09:21 +08:00
										 |  |  | func makeTestMetrics(n int) []byte { | 
					
						
							|  |  |  | 	// Construct a metrics string to parse
 | 
					
						
							|  |  |  | 	sb := bytes.Buffer{} | 
					
						
							|  |  |  | 	for i := 0; i < n; i++ { | 
					
						
							|  |  |  | 		fmt.Fprintf(&sb, "# TYPE metric_a gauge\n") | 
					
						
							|  |  |  | 		fmt.Fprintf(&sb, "# HELP metric_a help text\n") | 
					
						
							|  |  |  | 		fmt.Fprintf(&sb, "metric_a{foo=\"%d\",bar=\"%d\"} 1\n", i, i*100) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	return sb.Bytes() | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | func BenchmarkScrapeLoopAppend(b *testing.B) { | 
					
						
							|  |  |  | 	ctx, sl := simpleTestScrapeLoop(b) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	slApp := sl.appender(ctx) | 
					
						
							|  |  |  | 	metrics := makeTestMetrics(100) | 
					
						
							|  |  |  | 	ts := time.Time{} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	b.ResetTimer() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	for i := 0; i < b.N; i++ { | 
					
						
							|  |  |  | 		ts = ts.Add(time.Second) | 
					
						
							|  |  |  | 		_, _, _, _ = sl.append(slApp, metrics, "", ts) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | func BenchmarkScrapeLoopAppendOM(b *testing.B) { | 
					
						
							|  |  |  | 	ctx, sl := simpleTestScrapeLoop(b) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	slApp := sl.appender(ctx) | 
					
						
							|  |  |  | 	metrics := makeTestMetrics(100) | 
					
						
							|  |  |  | 	ts := time.Time{} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	b.ResetTimer() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	for i := 0; i < b.N; i++ { | 
					
						
							|  |  |  | 		ts = ts.Add(time.Second) | 
					
						
							|  |  |  | 		_, _, _, _ = sl.append(slApp, metrics, "application/openmetrics-text", ts) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-05-03 21:55:35 +08:00
										 |  |  | func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) { | 
					
						
							|  |  |  | 	appender := &collectResultAppender{} | 
					
						
							|  |  |  | 	var ( | 
					
						
							| 
									
										
										
										
											2020-02-13 15:53:07 +08:00
										 |  |  | 		signal  = make(chan struct{}, 1) | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 		scraper = &testScraper{} | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		app     = func(ctx context.Context) storage.Appender { return appender } | 
					
						
							| 
									
										
										
										
											2017-05-03 21:55:35 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	sl := newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		app, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		10*time.Millisecond, | 
					
						
							|  |  |  | 		time.Hour, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	) | 
					
						
							| 
									
										
										
										
											2017-05-03 21:55:35 +08:00
										 |  |  | 	// Succeed once, several failures, then stop.
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	numScrapes := 0 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-05-03 21:55:35 +08:00
										 |  |  | 	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error { | 
					
						
							| 
									
										
										
										
											2017-05-26 16:44:48 +08:00
										 |  |  | 		numScrapes++ | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-05-03 21:55:35 +08:00
										 |  |  | 		if numScrapes == 1 { | 
					
						
							|  |  |  | 			w.Write([]byte("metric_a 42\n")) | 
					
						
							|  |  |  | 			return nil | 
					
						
							|  |  |  | 		} else if numScrapes == 5 { | 
					
						
							|  |  |  | 			cancel() | 
					
						
							|  |  |  | 		} | 
					
						
							| 
									
										
										
										
											2019-03-26 07:01:12 +08:00
										 |  |  | 		return errors.New("scrape failed") | 
					
						
							| 
									
										
										
										
											2017-05-03 21:55:35 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		sl.run(nil) | 
					
						
							| 
									
										
										
										
											2017-05-03 21:55:35 +08:00
										 |  |  | 		signal <- struct{}{} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-signal: | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Scrape wasn't stopped.") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 	// 1 successfully scraped sample, 1 stale marker after first fail, 5 report samples for
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	// each scrape successful or not.
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, 27, len(appender.result), "Appended samples not as expected") | 
					
						
							|  |  |  | 	require.Equal(t, 42.0, appender.result[0].v, "Appended first sample not as expected") | 
					
						
							|  |  |  | 	require.True(t, value.IsStaleNaN(appender.result[6].v), | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 		"Appended second sample not as expected. Wanted: stale NaN Got: %x", math.Float64bits(appender.result[6].v)) | 
					
						
							| 
									
										
										
										
											2017-05-03 23:51:45 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | func TestScrapeLoopRunCreatesStaleMarkersOnParseFailure(t *testing.T) { | 
					
						
							|  |  |  | 	appender := &collectResultAppender{} | 
					
						
							|  |  |  | 	var ( | 
					
						
							| 
									
										
										
										
											2020-02-13 15:53:07 +08:00
										 |  |  | 		signal     = make(chan struct{}, 1) | 
					
						
							| 
									
										
										
										
											2017-05-03 23:51:45 +08:00
										 |  |  | 		scraper    = &testScraper{} | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		app        = func(ctx context.Context) storage.Appender { return appender } | 
					
						
							| 
									
										
										
										
											2017-05-03 23:51:45 +08:00
										 |  |  | 		numScrapes = 0 | 
					
						
							|  |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	sl := newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		app, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		10*time.Millisecond, | 
					
						
							|  |  |  | 		time.Hour, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	) | 
					
						
							| 
									
										
										
										
											2017-05-03 23:51:45 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	// Succeed once, several failures, then stop.
 | 
					
						
							|  |  |  | 	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error { | 
					
						
							| 
									
										
										
										
											2017-05-26 16:44:48 +08:00
										 |  |  | 		numScrapes++ | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-05-03 23:51:45 +08:00
										 |  |  | 		if numScrapes == 1 { | 
					
						
							|  |  |  | 			w.Write([]byte("metric_a 42\n")) | 
					
						
							|  |  |  | 			return nil | 
					
						
							|  |  |  | 		} else if numScrapes == 2 { | 
					
						
							|  |  |  | 			w.Write([]byte("7&-\n")) | 
					
						
							|  |  |  | 			return nil | 
					
						
							|  |  |  | 		} else if numScrapes == 3 { | 
					
						
							|  |  |  | 			cancel() | 
					
						
							|  |  |  | 		} | 
					
						
							| 
									
										
										
										
											2019-03-26 07:01:12 +08:00
										 |  |  | 		return errors.New("scrape failed") | 
					
						
							| 
									
										
										
										
											2017-05-03 23:51:45 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		sl.run(nil) | 
					
						
							| 
									
										
										
										
											2017-05-03 23:51:45 +08:00
										 |  |  | 		signal <- struct{}{} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-signal: | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Scrape wasn't stopped.") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 	// 1 successfully scraped sample, 1 stale marker after first fail, 5 report samples for
 | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 	// each scrape successful or not.
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, 17, len(appender.result), "Appended samples not as expected") | 
					
						
							|  |  |  | 	require.Equal(t, 42.0, appender.result[0].v, "Appended first sample not as expected") | 
					
						
							|  |  |  | 	require.True(t, value.IsStaleNaN(appender.result[6].v), | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 		"Appended second sample not as expected. Wanted: stale NaN Got: %x", math.Float64bits(appender.result[6].v)) | 
					
						
							| 
									
										
										
										
											2019-03-29 01:07:14 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | func TestScrapeLoopCache(t *testing.T) { | 
					
						
							| 
									
										
										
										
											2019-08-09 09:35:39 +08:00
										 |  |  | 	s := teststorage.New(t) | 
					
						
							| 
									
										
										
										
											2019-03-29 01:07:14 +08:00
										 |  |  | 	defer s.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-17 18:30:22 +08:00
										 |  |  | 	appender := &collectResultAppender{} | 
					
						
							| 
									
										
										
										
											2019-03-29 01:07:14 +08:00
										 |  |  | 	var ( | 
					
						
							| 
									
										
										
										
											2020-02-13 15:53:07 +08:00
										 |  |  | 		signal  = make(chan struct{}, 1) | 
					
						
							| 
									
										
										
										
											2019-03-29 01:07:14 +08:00
										 |  |  | 		scraper = &testScraper{} | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		app     = func(ctx context.Context) storage.Appender { appender.next = s.Appender(ctx); return appender } | 
					
						
							| 
									
										
										
										
											2019-03-29 01:07:14 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							|  |  |  | 	sl := newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		app, | 
					
						
							|  |  |  | 		nil, | 
					
						
							|  |  |  | 		0, | 
					
						
							|  |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		10*time.Millisecond, | 
					
						
							|  |  |  | 		time.Hour, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2019-03-29 01:07:14 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	numScrapes := 0 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error { | 
					
						
							|  |  |  | 		if numScrapes == 1 || numScrapes == 2 { | 
					
						
							|  |  |  | 			if _, ok := sl.cache.series["metric_a"]; !ok { | 
					
						
							|  |  |  | 				t.Errorf("metric_a missing from cache after scrape %d", numScrapes) | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 			if _, ok := sl.cache.series["metric_b"]; !ok { | 
					
						
							|  |  |  | 				t.Errorf("metric_b missing from cache after scrape %d", numScrapes) | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 		} else if numScrapes == 3 { | 
					
						
							|  |  |  | 			if _, ok := sl.cache.series["metric_a"]; !ok { | 
					
						
							|  |  |  | 				t.Errorf("metric_a missing from cache after scrape %d", numScrapes) | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 			if _, ok := sl.cache.series["metric_b"]; ok { | 
					
						
							|  |  |  | 				t.Errorf("metric_b present in cache after scrape %d", numScrapes) | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 		numScrapes++ | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 		if numScrapes == 1 { | 
					
						
							|  |  |  | 			w.Write([]byte("metric_a 42\nmetric_b 43\n")) | 
					
						
							|  |  |  | 			return nil | 
					
						
							|  |  |  | 		} else if numScrapes == 3 { | 
					
						
							|  |  |  | 			w.Write([]byte("metric_a 44\n")) | 
					
						
							|  |  |  | 			return nil | 
					
						
							|  |  |  | 		} else if numScrapes == 4 { | 
					
						
							|  |  |  | 			cancel() | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 		return fmt.Errorf("scrape failed") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		sl.run(nil) | 
					
						
							| 
									
										
										
										
											2019-03-29 01:07:14 +08:00
										 |  |  | 		signal <- struct{}{} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-signal: | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Scrape wasn't stopped.") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-05-09 05:24:00 +08:00
										 |  |  | 	// 1 successfully scraped sample, 1 stale marker after first fail, 5 report samples for
 | 
					
						
							| 
									
										
										
										
											2019-03-29 01:07:14 +08:00
										 |  |  | 	// each scrape successful or not.
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, 26, len(appender.result), "Appended samples not as expected") | 
					
						
							| 
									
										
										
										
											2019-03-29 01:52:46 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | func TestScrapeLoopCacheMemoryExhaustionProtection(t *testing.T) { | 
					
						
							| 
									
										
										
										
											2019-08-09 09:35:39 +08:00
										 |  |  | 	s := teststorage.New(t) | 
					
						
							| 
									
										
										
										
											2019-03-29 01:52:46 +08:00
										 |  |  | 	defer s.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 	sapp := s.Appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-03-29 01:52:46 +08:00
										 |  |  | 	appender := &collectResultAppender{next: sapp} | 
					
						
							|  |  |  | 	var ( | 
					
						
							| 
									
										
										
										
											2020-02-13 15:53:07 +08:00
										 |  |  | 		signal  = make(chan struct{}, 1) | 
					
						
							| 
									
										
										
										
											2019-03-29 01:52:46 +08:00
										 |  |  | 		scraper = &testScraper{} | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		app     = func(ctx context.Context) storage.Appender { return appender } | 
					
						
							| 
									
										
										
										
											2019-03-29 01:52:46 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							|  |  |  | 	sl := newScrapeLoop(ctx, | 
					
						
							|  |  |  | 		scraper, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		app, | 
					
						
							|  |  |  | 		nil, | 
					
						
							|  |  |  | 		0, | 
					
						
							|  |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		10*time.Millisecond, | 
					
						
							|  |  |  | 		time.Hour, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2019-03-29 01:52:46 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	numScrapes := 0 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error { | 
					
						
							|  |  |  | 		numScrapes++ | 
					
						
							|  |  |  | 		if numScrapes < 5 { | 
					
						
							|  |  |  | 			s := "" | 
					
						
							|  |  |  | 			for i := 0; i < 500; i++ { | 
					
						
							|  |  |  | 				s = fmt.Sprintf("%smetric_%d_%d 42\n", s, i, numScrapes) | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 			w.Write([]byte(fmt.Sprintf(s + "&"))) | 
					
						
							|  |  |  | 		} else { | 
					
						
							|  |  |  | 			cancel() | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 		return nil | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		sl.run(nil) | 
					
						
							| 
									
										
										
										
											2019-03-29 01:52:46 +08:00
										 |  |  | 		signal <- struct{}{} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-signal: | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Scrape wasn't stopped.") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	if len(sl.cache.series) > 2000 { | 
					
						
							|  |  |  | 		t.Fatalf("More than 2000 series cached. Got: %d", len(sl.cache.series)) | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2017-05-03 21:55:35 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-04-11 22:42:17 +08:00
										 |  |  | func TestScrapeLoopAppend(t *testing.T) { | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 	tests := []struct { | 
					
						
							|  |  |  | 		title           string | 
					
						
							|  |  |  | 		honorLabels     bool | 
					
						
							|  |  |  | 		scrapeLabels    string | 
					
						
							|  |  |  | 		discoveryLabels []string | 
					
						
							|  |  |  | 		expLset         labels.Labels | 
					
						
							|  |  |  | 		expValue        float64 | 
					
						
							|  |  |  | 	}{ | 
					
						
							| 
									
										
										
										
											2017-04-11 22:42:17 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 			// When "honor_labels" is not set
 | 
					
						
							|  |  |  | 			// label name collision is handler by adding a prefix.
 | 
					
						
							|  |  |  | 			title:           "Label name collision", | 
					
						
							|  |  |  | 			honorLabels:     false, | 
					
						
							|  |  |  | 			scrapeLabels:    `metric{n="1"} 0`, | 
					
						
							|  |  |  | 			discoveryLabels: []string{"n", "2"}, | 
					
						
							|  |  |  | 			expLset:         labels.FromStrings("__name__", "metric", "exported_n", "1", "n", "2"), | 
					
						
							|  |  |  | 			expValue:        0, | 
					
						
							| 
									
										
										
										
											2019-11-20 23:50:05 +08:00
										 |  |  | 		}, { | 
					
						
							|  |  |  | 			// When "honor_labels" is not set
 | 
					
						
							|  |  |  | 			// exported label from discovery don't get overwritten
 | 
					
						
							|  |  |  | 			title:           "Label name collision", | 
					
						
							|  |  |  | 			honorLabels:     false, | 
					
						
							|  |  |  | 			scrapeLabels:    `metric 0`, | 
					
						
							|  |  |  | 			discoveryLabels: []string{"n", "2", "exported_n", "2"}, | 
					
						
							|  |  |  | 			expLset:         labels.FromStrings("__name__", "metric", "n", "2", "exported_n", "2"), | 
					
						
							|  |  |  | 			expValue:        0, | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 		}, { | 
					
						
							|  |  |  | 			// Labels with no value need to be removed as these should not be ingested.
 | 
					
						
							|  |  |  | 			title:           "Delete Empty labels", | 
					
						
							|  |  |  | 			honorLabels:     false, | 
					
						
							|  |  |  | 			scrapeLabels:    `metric{n=""} 0`, | 
					
						
							|  |  |  | 			discoveryLabels: nil, | 
					
						
							|  |  |  | 			expLset:         labels.FromStrings("__name__", "metric"), | 
					
						
							|  |  |  | 			expValue:        0, | 
					
						
							|  |  |  | 		}, { | 
					
						
							|  |  |  | 			// Honor Labels should ignore labels with the same name.
 | 
					
						
							|  |  |  | 			title:           "Honor Labels", | 
					
						
							|  |  |  | 			honorLabels:     true, | 
					
						
							|  |  |  | 			scrapeLabels:    `metric{n1="1" n2="2"} 0`, | 
					
						
							|  |  |  | 			discoveryLabels: []string{"n1", "0"}, | 
					
						
							|  |  |  | 			expLset:         labels.FromStrings("__name__", "metric", "n1", "1", "n2", "2"), | 
					
						
							|  |  |  | 			expValue:        0, | 
					
						
							|  |  |  | 		}, { | 
					
						
							|  |  |  | 			title:           "Stale - NaN", | 
					
						
							|  |  |  | 			honorLabels:     false, | 
					
						
							|  |  |  | 			scrapeLabels:    `metric NaN`, | 
					
						
							|  |  |  | 			discoveryLabels: nil, | 
					
						
							|  |  |  | 			expLset:         labels.FromStrings("__name__", "metric"), | 
					
						
							|  |  |  | 			expValue:        float64(value.NormalNaN), | 
					
						
							| 
									
										
										
										
											2017-04-11 22:42:17 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	for _, test := range tests { | 
					
						
							|  |  |  | 		app := &collectResultAppender{} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 		discoveryLabels := &Target{ | 
					
						
							|  |  |  | 			labels: labels.FromStrings(test.discoveryLabels...), | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 		sl := newScrapeLoop(context.Background(), | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 			nil, nil, nil, | 
					
						
							|  |  |  | 			func(l labels.Labels) labels.Labels { | 
					
						
							| 
									
										
										
										
											2018-04-12 22:54:53 +08:00
										 |  |  | 				return mutateSampleLabels(l, discoveryLabels, test.honorLabels, nil) | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 			}, | 
					
						
							|  |  |  | 			func(l labels.Labels) labels.Labels { | 
					
						
							| 
									
										
										
										
											2018-04-12 22:54:53 +08:00
										 |  |  | 				return mutateReportSampleLabels(l, discoveryLabels) | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 			}, | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 			func(ctx context.Context) storage.Appender { return app }, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 			nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 			0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 			true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 			0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 			nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 			0, | 
					
						
							|  |  |  | 			0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 			false, | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 		) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 		now := time.Now() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 		slApp := sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 		_, _, _, err := sl.append(slApp, []byte(test.scrapeLabels), "", now) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 		require.NoError(t, err) | 
					
						
							|  |  |  | 		require.NoError(t, slApp.Commit()) | 
					
						
							| 
									
										
										
										
											2018-02-15 22:26:24 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 		expected := []sample{ | 
					
						
							|  |  |  | 			{ | 
					
						
							|  |  |  | 				metric: test.expLset, | 
					
						
							|  |  |  | 				t:      timestamp.FromTime(now), | 
					
						
							|  |  |  | 				v:      test.expValue, | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 		// When the expected value is NaN
 | 
					
						
							|  |  |  | 		// DeepEqual will report NaNs as being different,
 | 
					
						
							|  |  |  | 		// so replace it with the expected one.
 | 
					
						
							|  |  |  | 		if test.expValue == float64(value.NormalNaN) { | 
					
						
							|  |  |  | 			app.result[0].v = expected[0].v | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 		t.Logf("Test:%s", test.title) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 		require.Equal(t, expected, app.result) | 
					
						
							| 
									
										
										
										
											2017-04-11 22:42:17 +08:00
										 |  |  | 	} | 
					
						
							| 
									
										
										
										
											2017-04-14 17:41:18 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-03-26 10:31:48 +08:00
										 |  |  | func TestScrapeLoopAppendCacheEntryButErrNotFound(t *testing.T) { | 
					
						
							|  |  |  | 	// collectResultAppender's AddFast always returns ErrNotFound if we don't give it a next.
 | 
					
						
							|  |  |  | 	app := &collectResultAppender{} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	sl := newScrapeLoop(context.Background(), | 
					
						
							| 
									
										
										
										
											2020-03-26 10:31:48 +08:00
										 |  |  | 		nil, nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		func(ctx context.Context) storage.Appender { return app }, | 
					
						
							| 
									
										
										
										
											2020-03-26 10:31:48 +08:00
										 |  |  | 		nil, | 
					
						
							|  |  |  | 		0, | 
					
						
							|  |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		0, | 
					
						
							|  |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2020-03-26 10:31:48 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	fakeRef := uint64(1) | 
					
						
							|  |  |  | 	expValue := float64(1) | 
					
						
							|  |  |  | 	metric := `metric{n="1"} 1` | 
					
						
							|  |  |  | 	p := textparse.New([]byte(metric), "") | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	var lset labels.Labels | 
					
						
							|  |  |  | 	p.Next() | 
					
						
							|  |  |  | 	mets := p.Metric(&lset) | 
					
						
							|  |  |  | 	hash := lset.Hash() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	// Create a fake entry in the cache
 | 
					
						
							|  |  |  | 	sl.cache.addRef(mets, fakeRef, lset, hash) | 
					
						
							|  |  |  | 	now := time.Now() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp := sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	_, _, _, err := sl.append(slApp, []byte(metric), "", now) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							| 
									
										
										
										
											2020-03-26 10:31:48 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	expected := []sample{ | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			metric: lset, | 
					
						
							|  |  |  | 			t:      timestamp.FromTime(now), | 
					
						
							|  |  |  | 			v:      expValue, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, expected, app.result) | 
					
						
							| 
									
										
										
										
											2020-03-26 10:31:48 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | func TestScrapeLoopAppendSampleLimit(t *testing.T) { | 
					
						
							|  |  |  | 	resApp := &collectResultAppender{} | 
					
						
							|  |  |  | 	app := &limitAppender{Appender: resApp, limit: 1} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	sl := newScrapeLoop(context.Background(), | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 		nil, nil, nil, | 
					
						
							| 
									
										
										
										
											2020-01-30 01:47:36 +08:00
										 |  |  | 		func(l labels.Labels) labels.Labels { | 
					
						
							|  |  |  | 			if l.Has("deleteme") { | 
					
						
							|  |  |  | 				return nil | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 			return l | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 		nopMutator, | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		func(ctx context.Context) storage.Appender { return app }, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		app.limit, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		0, | 
					
						
							|  |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	// Get the value of the Counter before performing the append.
 | 
					
						
							|  |  |  | 	beforeMetric := dto.Metric{} | 
					
						
							|  |  |  | 	err := targetScrapeSampleLimit.Write(&beforeMetric) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 	beforeMetricValue := beforeMetric.GetCounter().GetValue() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	now := time.Now() | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp := sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	total, added, seriesAdded, err := sl.append(app, []byte("metric_a 1\nmetric_b 1\nmetric_c 1\n"), "", now) | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 	if err != errSampleLimit { | 
					
						
							|  |  |  | 		t.Fatalf("Did not see expected sample limit error: %s", err) | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, slApp.Rollback()) | 
					
						
							|  |  |  | 	require.Equal(t, 3, total) | 
					
						
							|  |  |  | 	require.Equal(t, 3, added) | 
					
						
							|  |  |  | 	require.Equal(t, 1, seriesAdded) | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-02-10 17:46:20 +08:00
										 |  |  | 	// Check that the Counter has been incremented a single time for the scrape,
 | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 	// not multiple times for each sample.
 | 
					
						
							|  |  |  | 	metric := dto.Metric{} | 
					
						
							|  |  |  | 	err = targetScrapeSampleLimit.Write(&metric) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 	value := metric.GetCounter().GetValue() | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 	change := value - beforeMetricValue | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, 1.0, change, "Unexpected change of sample limit metric: %f", change) | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	// And verify that we got the samples that fit under the limit.
 | 
					
						
							|  |  |  | 	want := []sample{ | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			metric: labels.FromStrings(model.MetricNameLabel, "metric_a"), | 
					
						
							|  |  |  | 			t:      timestamp.FromTime(now), | 
					
						
							|  |  |  | 			v:      1, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, want, resApp.rolledbackResult, "Appended samples not as expected") | 
					
						
							| 
									
										
										
										
											2020-01-30 01:47:36 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	now = time.Now() | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp = sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	total, added, seriesAdded, err = sl.append(slApp, []byte("metric_a 1\nmetric_b 1\nmetric_c{deleteme=\"yes\"} 1\nmetric_d 1\nmetric_e 1\nmetric_f 1\nmetric_g 1\nmetric_h{deleteme=\"yes\"} 1\nmetric_i{deleteme=\"yes\"} 1\n"), "", now) | 
					
						
							| 
									
										
										
										
											2020-01-30 01:47:36 +08:00
										 |  |  | 	if err != errSampleLimit { | 
					
						
							|  |  |  | 		t.Fatalf("Did not see expected sample limit error: %s", err) | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, slApp.Rollback()) | 
					
						
							|  |  |  | 	require.Equal(t, 9, total) | 
					
						
							|  |  |  | 	require.Equal(t, 6, added) | 
					
						
							|  |  |  | 	require.Equal(t, 0, seriesAdded) | 
					
						
							| 
									
										
										
										
											2018-01-09 23:43:28 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-09-15 17:08:51 +08:00
										 |  |  | func TestScrapeLoop_ChangingMetricString(t *testing.T) { | 
					
						
							|  |  |  | 	// This is a regression test for the scrape loop cache not properly maintaining
 | 
					
						
							|  |  |  | 	// IDs when the string representation of a metric changes across a scrape. Thus
 | 
					
						
							|  |  |  | 	// we use a real storage appender here.
 | 
					
						
							| 
									
										
										
										
											2019-08-09 09:35:39 +08:00
										 |  |  | 	s := teststorage.New(t) | 
					
						
							| 
									
										
										
										
											2017-09-15 17:08:51 +08:00
										 |  |  | 	defer s.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-17 18:30:22 +08:00
										 |  |  | 	capp := &collectResultAppender{} | 
					
						
							| 
									
										
										
										
											2017-09-15 17:08:51 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	sl := newScrapeLoop(context.Background(), | 
					
						
							| 
									
										
										
										
											2017-09-15 17:08:51 +08:00
										 |  |  | 		nil, nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		func(ctx context.Context) storage.Appender { capp.next = s.Appender(ctx); return capp }, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		0, | 
					
						
							|  |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2017-09-15 17:08:51 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	now := time.Now() | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp := sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	_, _, _, err := sl.append(slApp, []byte(`metric_a{a="1",b="1"} 1`), "", now) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp = sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	_, _, _, err = sl.append(slApp, []byte(`metric_a{b="1",a="1"} 2`), "", now.Add(time.Minute)) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							| 
									
										
										
										
											2017-09-15 17:08:51 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	// DeepEqual will report NaNs as being different, so replace with a different value.
 | 
					
						
							|  |  |  | 	want := []sample{ | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			metric: labels.FromStrings("__name__", "metric_a", "a", "1", "b", "1"), | 
					
						
							|  |  |  | 			t:      timestamp.FromTime(now), | 
					
						
							|  |  |  | 			v:      1, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			metric: labels.FromStrings("__name__", "metric_a", "a", "1", "b", "1"), | 
					
						
							|  |  |  | 			t:      timestamp.FromTime(now.Add(time.Minute)), | 
					
						
							|  |  |  | 			v:      2, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, want, capp.result, "Appended samples not as expected") | 
					
						
							| 
									
										
										
										
											2017-09-15 17:08:51 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-04-14 17:41:18 +08:00
										 |  |  | func TestScrapeLoopAppendStaleness(t *testing.T) { | 
					
						
							|  |  |  | 	app := &collectResultAppender{} | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	sl := newScrapeLoop(context.Background(), | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 		nil, nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		func(ctx context.Context) storage.Appender { return app }, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		0, | 
					
						
							|  |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2017-05-26 16:44:48 +08:00
										 |  |  | 	) | 
					
						
							| 
									
										
										
										
											2017-04-14 17:41:18 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	now := time.Now() | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp := sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	_, _, _, err := sl.append(slApp, []byte("metric_a 1\n"), "", now) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp = sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	_, _, _, err = sl.append(slApp, []byte(""), "", now.Add(time.Second)) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							| 
									
										
										
										
											2017-04-14 17:41:18 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	ingestedNaN := math.Float64bits(app.result[1].v) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, value.StaleNaN, ingestedNaN, "Appended stale sample wasn't as expected") | 
					
						
							| 
									
										
										
										
											2017-04-14 17:41:18 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	// DeepEqual will report NaNs as being different, so replace with a different value.
 | 
					
						
							|  |  |  | 	app.result[1].v = 42 | 
					
						
							|  |  |  | 	want := []sample{ | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			metric: labels.FromStrings(model.MetricNameLabel, "metric_a"), | 
					
						
							|  |  |  | 			t:      timestamp.FromTime(now), | 
					
						
							|  |  |  | 			v:      1, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			metric: labels.FromStrings(model.MetricNameLabel, "metric_a"), | 
					
						
							|  |  |  | 			t:      timestamp.FromTime(now.Add(time.Second)), | 
					
						
							|  |  |  | 			v:      42, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, want, app.result, "Appended samples not as expected") | 
					
						
							| 
									
										
										
										
											2017-04-11 22:42:17 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-04-28 23:36:36 +08:00
										 |  |  | func TestScrapeLoopAppendNoStalenessIfTimestamp(t *testing.T) { | 
					
						
							|  |  |  | 	app := &collectResultAppender{} | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	sl := newScrapeLoop(context.Background(), | 
					
						
							| 
									
										
										
										
											2017-09-08 20:34:45 +08:00
										 |  |  | 		nil, nil, nil, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		func(ctx context.Context) storage.Appender { return app }, | 
					
						
							| 
									
										
										
										
											2018-05-18 15:32:11 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		0, | 
					
						
							|  |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2017-05-26 16:44:48 +08:00
										 |  |  | 	) | 
					
						
							| 
									
										
										
										
											2017-04-28 23:36:36 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	now := time.Now() | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp := sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	_, _, _, err := sl.append(slApp, []byte("metric_a 1 1000\n"), "", now) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp = sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	_, _, _, err = sl.append(slApp, []byte(""), "", now.Add(time.Second)) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Commit()) | 
					
						
							| 
									
										
										
										
											2017-04-28 23:36:36 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	want := []sample{ | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			metric: labels.FromStrings(model.MetricNameLabel, "metric_a"), | 
					
						
							|  |  |  | 			t:      1000, | 
					
						
							|  |  |  | 			v:      1, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, want, app.result, "Appended samples not as expected") | 
					
						
							| 
									
										
										
										
											2017-05-29 21:08:55 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-03-16 17:47:45 +08:00
// TestScrapeLoopAppendExemplar checks that exemplars attached to
// OpenMetrics samples are parsed and forwarded to the appender, with and
// without explicit exemplar timestamps, and that target discovery labels
// are merged into the sample labels (clashing keys become exported_*).
func TestScrapeLoopAppendExemplar(t *testing.T) {
	tests := []struct {
		title           string              // sub-test name
		scrapeText      string              // OpenMetrics exposition body (must end in "# EOF")
		discoveryLabels []string            // target labels as alternating key/value pairs
		samples         []sample            // expected appended samples (t is filled in below)
		exemplars       []exemplar.Exemplar // expected exemplars (Ts filled in below when absent)
	}{
		{
			title:           "Metric without exemplars",
			scrapeText:      "metric_total{n=\"1\"} 0\n# EOF",
			discoveryLabels: []string{"n", "2"},
			samples: []sample{{
				metric: labels.FromStrings("__name__", "metric_total", "exported_n", "1", "n", "2"),
				v:      0,
			}},
		},
		{
			title:           "Metric with exemplars",
			scrapeText:      "metric_total{n=\"1\"} 0 # {a=\"abc\"} 1.0\n# EOF",
			discoveryLabels: []string{"n", "2"},
			samples: []sample{{
				metric: labels.FromStrings("__name__", "metric_total", "exported_n", "1", "n", "2"),
				v:      0,
			}},
			exemplars: []exemplar.Exemplar{
				{Labels: labels.FromStrings("a", "abc"), Value: 1},
			},
		}, {
			title:           "Metric with exemplars and TS",
			scrapeText:      "metric_total{n=\"1\"} 0 # {a=\"abc\"} 1.0 10000\n# EOF",
			discoveryLabels: []string{"n", "2"},
			samples: []sample{{
				metric: labels.FromStrings("__name__", "metric_total", "exported_n", "1", "n", "2"),
				v:      0,
			}},
			exemplars: []exemplar.Exemplar{
				// 10000 (seconds in the exposition) becomes 10000000 ms.
				{Labels: labels.FromStrings("a", "abc"), Value: 1, Ts: 10000000, HasTs: true},
			},
		}, {
			title: "Two metrics and exemplars",
			scrapeText: `metric_total{n="1"} 1 # {t="1"} 1.0 10000
metric_total{n="2"} 2 # {t="2"} 2.0 20000
# EOF`,
			samples: []sample{{
				metric: labels.FromStrings("__name__", "metric_total", "n", "1"),
				v:      1,
			}, {
				metric: labels.FromStrings("__name__", "metric_total", "n", "2"),
				v:      2,
			}},
			exemplars: []exemplar.Exemplar{
				{Labels: labels.FromStrings("t", "1"), Value: 1, Ts: 10000000, HasTs: true},
				{Labels: labels.FromStrings("t", "2"), Value: 2, Ts: 20000000, HasTs: true},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.title, func(t *testing.T) {
			app := &collectResultAppender{}

			discoveryLabels := &Target{
				labels: labels.FromStrings(test.discoveryLabels...),
			}

			sl := newScrapeLoop(context.Background(),
				nil, nil, nil,
				// Sample mutator: merge discovery labels into scraped labels.
				func(l labels.Labels) labels.Labels {
					return mutateSampleLabels(l, discoveryLabels, false, nil)
				},
				// Report-sample mutator for the synthetic up/duration series.
				func(l labels.Labels) labels.Labels {
					return mutateReportSampleLabels(l, discoveryLabels)
				},
				func(ctx context.Context) storage.Appender { return app },
				nil,
				0,
				true,
				0,
				nil,
				0,
				0,
				false,
			)

			now := time.Now()

			// Expected sample timestamps default to the scrape time.
			for i := range test.samples {
				test.samples[i].t = timestamp.FromTime(now)
			}

			// We need to set the timestamp for expected exemplars that does not have a timestamp.
			for i := range test.exemplars {
				if test.exemplars[i].Ts == 0 {
					test.exemplars[i].Ts = timestamp.FromTime(now)
				}
			}

			_, _, _, err := sl.append(app, []byte(test.scrapeText), "application/openmetrics-text", now)
			require.NoError(t, err)
			require.NoError(t, app.Commit())
			require.Equal(t, test.samples, app.result)
			require.Equal(t, test.exemplars, app.resultExemplars)
		})
	}
}
					
						
							|  |  |  | 
 | 
					
						
// TestScrapeLoopAppendExemplarSeries verifies that exemplars attached to
// successive scrapes of the same series are appended alongside the samples.
func TestScrapeLoopAppendExemplarSeries(t *testing.T) {
	// Two consecutive scrapes of the same series, each carrying one
	// exemplar in OpenMetrics exposition format.
	scrapeText := []string{`metric_total{n="1"} 1 # {t="1"} 1.0 10000
# EOF`, `metric_total{n="1"} 2 # {t="2"} 2.0 20000
# EOF`}
	samples := []sample{{
		metric: labels.FromStrings("__name__", "metric_total", "n", "1"),
		v:      1,
	}, {
		metric: labels.FromStrings("__name__", "metric_total", "n", "1"),
		v:      2,
	}}
	// Exemplar timestamps: 10000 / 20000 in the exposition text become
	// 10000000 / 20000000 here (milliseconds).
	exemplars := []exemplar.Exemplar{
		{Labels: labels.FromStrings("t", "1"), Value: 1, Ts: 10000000, HasTs: true},
		{Labels: labels.FromStrings("t", "2"), Value: 2, Ts: 20000000, HasTs: true},
	}
	discoveryLabels := &Target{
		labels: labels.FromStrings(),
	}

	app := &collectResultAppender{}

	sl := newScrapeLoop(context.Background(),
		nil, nil, nil,
		func(l labels.Labels) labels.Labels {
			return mutateSampleLabels(l, discoveryLabels, false, nil)
		},
		func(l labels.Labels) labels.Labels {
			return mutateReportSampleLabels(l, discoveryLabels)
		},
		func(ctx context.Context) storage.Appender { return app },
		nil,
		0,
		true,
		0,
		nil,
		0,
		0,
		false,
	)

	now := time.Now()

	// Give each scrape a distinct timestamp, one second apart.
	for i := range samples {
		ts := now.Add(time.Second * time.Duration(i))
		samples[i].t = timestamp.FromTime(ts)
	}

	// We need to set the timestamp for expected exemplars that do not have a timestamp.
	for i := range exemplars {
		if exemplars[i].Ts == 0 {
			ts := now.Add(time.Second * time.Duration(i))
			exemplars[i].Ts = timestamp.FromTime(ts)
		}
	}

	for i, st := range scrapeText {
		_, _, _, err := sl.append(app, []byte(st), "application/openmetrics-text", timestamp.Time(samples[i].t))
		require.NoError(t, err)
		require.NoError(t, app.Commit())
	}

	require.Equal(t, samples, app.result)
	require.Equal(t, exemplars, app.resultExemplars)
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-06-15 10:08:03 +08:00
// TestScrapeLoopRunReportsTargetDownOnScrapeError verifies that when the
// scraper itself returns an error, the loop still reports the target as
// down: the synthetic 'up' sample appended for the scrape has value 0.
func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
	var (
		scraper  = &testScraper{}
		appender = &collectResultAppender{}
		app      = func(ctx context.Context) storage.Appender { return appender }
	)

	ctx, cancel := context.WithCancel(context.Background())
	sl := newScrapeLoop(ctx,
		scraper,
		nil, nil,
		nopMutator,
		nopMutator,
		app,
		nil,
		0,
		true,
		0,
		nil,
		10*time.Millisecond,
		time.Hour,
		false,
	)

	// Fail the scrape and cancel the loop's context so run() exits after
	// the first iteration.
	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
		cancel()
		return errors.New("scrape failed")
	}

	sl.run(nil)
	require.Equal(t, 0.0, appender.result[0].v, "bad 'up' value")
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-06-16 20:09:50 +08:00
// TestScrapeLoopRunReportsTargetDownOnInvalidUTF8 verifies that a scrape
// body containing an invalid UTF-8 label value is treated as a failed
// scrape: the synthetic 'up' sample appended for the scrape has value 0.
func TestScrapeLoopRunReportsTargetDownOnInvalidUTF8(t *testing.T) {
	var (
		scraper  = &testScraper{}
		appender = &collectResultAppender{}
		app      = func(ctx context.Context) storage.Appender { return appender }
	)

	ctx, cancel := context.WithCancel(context.Background())
	sl := newScrapeLoop(ctx,
		scraper,
		nil, nil,
		nopMutator,
		nopMutator,
		app,
		nil,
		0,
		true,
		0,
		nil,
		10*time.Millisecond,
		time.Hour,
		false,
	)

	// Emit a label value with an invalid UTF-8 byte (\xff) and cancel the
	// loop's context so run() exits after the first iteration.
	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
		cancel()
		w.Write([]byte("a{l=\"\xff\"} 1\n"))
		return nil
	}

	sl.run(nil)
	require.Equal(t, 0.0, appender.result[0].v, "bad 'up' value")
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-05-04 00:20:07 +08:00
// errorAppender wraps collectResultAppender and makes Append fail for a
// few well-known metric names so tests can provoke specific storage errors.
type errorAppender struct {
	collectResultAppender
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-02-18 20:07:00 +08:00
										 |  |  | func (app *errorAppender) Append(ref uint64, lset labels.Labels, t int64, v float64) (uint64, error) { | 
					
						
							| 
									
										
										
										
											2017-07-04 20:55:33 +08:00
										 |  |  | 	switch lset.Get(model.MetricNameLabel) { | 
					
						
							|  |  |  | 	case "out_of_order": | 
					
						
							| 
									
										
										
										
											2017-09-07 20:14:41 +08:00
										 |  |  | 		return 0, storage.ErrOutOfOrderSample | 
					
						
							| 
									
										
										
										
											2017-07-04 20:55:33 +08:00
										 |  |  | 	case "amend": | 
					
						
							| 
									
										
										
										
											2017-09-07 20:14:41 +08:00
										 |  |  | 		return 0, storage.ErrDuplicateSampleForTimestamp | 
					
						
							| 
									
										
										
										
											2017-07-04 20:55:33 +08:00
										 |  |  | 	case "out_of_bounds": | 
					
						
							| 
									
										
										
										
											2017-09-07 20:14:41 +08:00
										 |  |  | 		return 0, storage.ErrOutOfBounds | 
					
						
							| 
									
										
										
										
											2017-07-04 20:55:33 +08:00
										 |  |  | 	default: | 
					
						
							| 
									
										
										
										
											2021-02-18 20:07:00 +08:00
										 |  |  | 		return app.collectResultAppender.Append(ref, lset, t, v) | 
					
						
							| 
									
										
										
										
											2017-05-04 00:20:07 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-07-04 20:55:33 +08:00
// TestScrapeLoopAppendGracefullyIfAmendOrOutOfOrderOrOutOfBounds verifies
// that duplicate, out-of-order and out-of-bounds samples are dropped
// without failing the whole append: the remaining "normal" sample is
// committed and total/added/seriesAdded reflect what actually happened.
func TestScrapeLoopAppendGracefullyIfAmendOrOutOfOrderOrOutOfBounds(t *testing.T) {
	app := &errorAppender{}

	sl := newScrapeLoop(context.Background(),
		nil,
		nil, nil,
		nopMutator,
		nopMutator,
		func(ctx context.Context) storage.Appender { return app },
		nil,
		0,
		true,
		0,
		nil,
		0,
		0,
		false,
	)

	now := time.Unix(1, 0)
	slApp := sl.appender(context.Background())
	// errorAppender rejects the out_of_order, amend and out_of_bounds
	// series by name; only "normal" should reach the result set.
	total, added, seriesAdded, err := sl.append(slApp, []byte("out_of_order 1\namend 1\nnormal 1\nout_of_bounds 1\n"), "", now)
	require.NoError(t, err)
	require.NoError(t, slApp.Commit())

	want := []sample{
		{
			metric: labels.FromStrings(model.MetricNameLabel, "normal"),
			t:      timestamp.FromTime(now),
			v:      1,
		},
	}
	require.Equal(t, want, app.result, "Appended samples not as expected")
	// All 4 samples were scraped and attempted, but only 1 new series stuck.
	require.Equal(t, 4, total)
	require.Equal(t, 4, added)
	require.Equal(t, 1, seriesAdded)
}
					
						
							| 
									
										
										
										
											2017-05-04 00:20:07 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-07-04 20:55:33 +08:00
// TestScrapeLoopOutOfBoundsTimeError verifies that a sample whose scrape
// timestamp exceeds the appender's maxTime is counted as scraped and added
// but creates no new series, and does not fail the append as a whole.
func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) {
	app := &collectResultAppender{}
	sl := newScrapeLoop(context.Background(),
		nil,
		nil, nil,
		nopMutator,
		nopMutator,
		func(ctx context.Context) storage.Appender {
			// Reject any sample more than 10 minutes in the future.
			return &timeLimitAppender{
				Appender: app,
				maxTime:  timestamp.FromTime(time.Now().Add(10 * time.Minute)),
			}
		},
		nil,
		0,
		true,
		0,
		nil,
		0,
		0,
		false,
	)

	// Scrape timestamp 20 minutes ahead — past the 10-minute limit above.
	now := time.Now().Add(20 * time.Minute)
	slApp := sl.appender(context.Background())
	total, added, seriesAdded, err := sl.append(slApp, []byte("normal 1\n"), "", now)
	require.NoError(t, err)
	require.NoError(t, slApp.Commit())
	require.Equal(t, 1, total)
	require.Equal(t, 1, added)
	require.Equal(t, 0, seriesAdded)
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | func TestTargetScraperScrapeOK(t *testing.T) { | 
					
						
							| 
									
										
										
										
											2017-04-05 01:26:28 +08:00
										 |  |  | 	const ( | 
					
						
							|  |  |  | 		configTimeout   = 1500 * time.Millisecond | 
					
						
							| 
									
										
										
										
											2021-06-18 15:38:12 +08:00
										 |  |  | 		expectedTimeout = "1.5" | 
					
						
							| 
									
										
										
										
											2017-04-05 01:26:28 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 	server := httptest.NewServer( | 
					
						
							|  |  |  | 		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { | 
					
						
							| 
									
										
										
										
											2017-09-23 00:06:43 +08:00
										 |  |  | 			accept := r.Header.Get("Accept") | 
					
						
							| 
									
										
										
										
											2018-10-06 00:11:16 +08:00
										 |  |  | 			if !strings.HasPrefix(accept, "application/openmetrics-text;") { | 
					
						
							|  |  |  | 				t.Errorf("Expected Accept header to prefer application/openmetrics-text, got %q", accept) | 
					
						
							| 
									
										
										
										
											2017-09-23 00:06:43 +08:00
										 |  |  | 			} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-04-06 02:56:22 +08:00
										 |  |  | 			timeout := r.Header.Get("X-Prometheus-Scrape-Timeout-Seconds") | 
					
						
							| 
									
										
										
										
											2017-04-05 01:26:28 +08:00
										 |  |  | 			if timeout != expectedTimeout { | 
					
						
							| 
									
										
										
										
											2017-09-23 00:06:43 +08:00
										 |  |  | 				t.Errorf("Expected scrape timeout header %q, got %q", expectedTimeout, timeout) | 
					
						
							| 
									
										
										
										
											2017-04-05 01:26:28 +08:00
										 |  |  | 			} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 			w.Header().Set("Content-Type", `text/plain; version=0.0.4`) | 
					
						
							|  |  |  | 			w.Write([]byte("metric_a 1\nmetric_b 2\n")) | 
					
						
							|  |  |  | 		}), | 
					
						
							|  |  |  | 	) | 
					
						
							|  |  |  | 	defer server.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	serverURL, err := url.Parse(server.URL) | 
					
						
							|  |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		panic(err) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ts := &targetScraper{ | 
					
						
							|  |  |  | 		Target: &Target{ | 
					
						
							| 
									
										
										
										
											2016-12-29 16:27:30 +08:00
										 |  |  | 			labels: labels.FromStrings( | 
					
						
							|  |  |  | 				model.SchemeLabel, serverURL.Scheme, | 
					
						
							|  |  |  | 				model.AddressLabel, serverURL.Host, | 
					
						
							|  |  |  | 			), | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2017-04-05 01:26:28 +08:00
										 |  |  | 		client:  http.DefaultClient, | 
					
						
							|  |  |  | 		timeout: configTimeout, | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 	} | 
					
						
							| 
									
										
										
										
											2017-01-16 00:33:07 +08:00
										 |  |  | 	var buf bytes.Buffer | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-11-26 21:05:07 +08:00
										 |  |  | 	contentType, err := ts.scrape(context.Background(), &buf) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.Equal(t, "text/plain; version=0.0.4", contentType) | 
					
						
							|  |  |  | 	require.Equal(t, "metric_a 1\nmetric_b 2\n", buf.String()) | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | func TestTargetScrapeScrapeCancel(t *testing.T) { | 
					
						
							|  |  |  | 	block := make(chan struct{}) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	server := httptest.NewServer( | 
					
						
							|  |  |  | 		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { | 
					
						
							|  |  |  | 			<-block | 
					
						
							|  |  |  | 		}), | 
					
						
							|  |  |  | 	) | 
					
						
							|  |  |  | 	defer server.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	serverURL, err := url.Parse(server.URL) | 
					
						
							|  |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		panic(err) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ts := &targetScraper{ | 
					
						
							|  |  |  | 		Target: &Target{ | 
					
						
							| 
									
										
										
										
											2016-12-29 16:27:30 +08:00
										 |  |  | 			labels: labels.FromStrings( | 
					
						
							|  |  |  | 				model.SchemeLabel, serverURL.Scheme, | 
					
						
							|  |  |  | 				model.AddressLabel, serverURL.Host, | 
					
						
							|  |  |  | 			), | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		client: http.DefaultClient, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-02-13 15:53:07 +08:00
										 |  |  | 	errc := make(chan error, 1) | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							|  |  |  | 		time.Sleep(1 * time.Second) | 
					
						
							|  |  |  | 		cancel() | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	go func() { | 
					
						
							| 
									
										
										
										
											2018-11-19 19:31:16 +08:00
										 |  |  | 		_, err := ts.scrape(ctx, ioutil.Discard) | 
					
						
							|  |  |  | 		if err == nil { | 
					
						
							| 
									
										
										
										
											2019-03-26 07:01:12 +08:00
										 |  |  | 			errc <- errors.New("Expected error but got nil") | 
					
						
							| 
									
										
										
										
											2018-11-19 19:31:16 +08:00
										 |  |  | 		} else if ctx.Err() != context.Canceled { | 
					
						
							| 
									
										
										
										
											2019-10-10 17:47:30 +08:00
										 |  |  | 			errc <- errors.Errorf("Expected context cancellation error but got: %s", ctx.Err()) | 
					
						
							| 
									
										
										
										
											2020-02-13 15:53:07 +08:00
										 |  |  | 		} else { | 
					
						
							|  |  |  | 			close(errc) | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 		} | 
					
						
							|  |  |  | 	}() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	select { | 
					
						
							|  |  |  | 	case <-time.After(5 * time.Second): | 
					
						
							|  |  |  | 		t.Fatalf("Scrape function did not return unexpectedly") | 
					
						
							| 
									
										
										
										
											2016-11-14 01:21:42 +08:00
										 |  |  | 	case err := <-errc: | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 		require.NoError(t, err) | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 	// If this is closed in a defer above the function the test server
 | 
					
						
							| 
									
										
										
										
											2018-04-27 20:04:02 +08:00
										 |  |  | 	// doesn't terminate and the test doesn't complete.
 | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 	close(block) | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | func TestTargetScrapeScrapeNotFound(t *testing.T) { | 
					
						
							|  |  |  | 	server := httptest.NewServer( | 
					
						
							|  |  |  | 		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { | 
					
						
							|  |  |  | 			w.WriteHeader(http.StatusNotFound) | 
					
						
							|  |  |  | 		}), | 
					
						
							|  |  |  | 	) | 
					
						
							|  |  |  | 	defer server.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	serverURL, err := url.Parse(server.URL) | 
					
						
							|  |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		panic(err) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ts := &targetScraper{ | 
					
						
							|  |  |  | 		Target: &Target{ | 
					
						
							| 
									
										
										
										
											2016-12-29 16:27:30 +08:00
										 |  |  | 			labels: labels.FromStrings( | 
					
						
							|  |  |  | 				model.SchemeLabel, serverURL.Scheme, | 
					
						
							|  |  |  | 				model.AddressLabel, serverURL.Host, | 
					
						
							|  |  |  | 			), | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		client: http.DefaultClient, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-11-05 07:43:42 +08:00
										 |  |  | 	_, err = ts.scrape(context.Background(), ioutil.Discard) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Contains(t, err.Error(), "404", "Expected \"404 NotFound\" error but got: %s", err) | 
					
						
							| 
									
										
										
										
											2016-02-29 06:59:03 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-05-16 10:19:22 +08:00
										 |  |  | func TestTargetScraperBodySizeLimit(t *testing.T) { | 
					
						
							|  |  |  | 	const ( | 
					
						
							|  |  |  | 		bodySizeLimit = 15 | 
					
						
							|  |  |  | 		responseBody  = "metric_a 1\nmetric_b 2\n" | 
					
						
							|  |  |  | 	) | 
					
						
							|  |  |  | 	var gzipResponse bool | 
					
						
							|  |  |  | 	server := httptest.NewServer( | 
					
						
							|  |  |  | 		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { | 
					
						
							|  |  |  | 			w.Header().Set("Content-Type", `text/plain; version=0.0.4`) | 
					
						
							|  |  |  | 			if gzipResponse { | 
					
						
							|  |  |  | 				w.Header().Set("Content-Encoding", "gzip") | 
					
						
							|  |  |  | 				gw := gzip.NewWriter(w) | 
					
						
							|  |  |  | 				defer gw.Close() | 
					
						
							|  |  |  | 				gw.Write([]byte(responseBody)) | 
					
						
							|  |  |  | 				return | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 			w.Write([]byte(responseBody)) | 
					
						
							|  |  |  | 		}), | 
					
						
							|  |  |  | 	) | 
					
						
							|  |  |  | 	defer server.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	serverURL, err := url.Parse(server.URL) | 
					
						
							|  |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		panic(err) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	ts := &targetScraper{ | 
					
						
							|  |  |  | 		Target: &Target{ | 
					
						
							|  |  |  | 			labels: labels.FromStrings( | 
					
						
							|  |  |  | 				model.SchemeLabel, serverURL.Scheme, | 
					
						
							|  |  |  | 				model.AddressLabel, serverURL.Host, | 
					
						
							|  |  |  | 			), | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		client:        http.DefaultClient, | 
					
						
							|  |  |  | 		bodySizeLimit: bodySizeLimit, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	var buf bytes.Buffer | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	// Target response uncompressed body, scrape with body size limit.
 | 
					
						
							|  |  |  | 	_, err = ts.scrape(context.Background(), &buf) | 
					
						
							|  |  |  | 	require.ErrorIs(t, err, errBodySizeLimit) | 
					
						
							|  |  |  | 	require.Equal(t, bodySizeLimit, buf.Len()) | 
					
						
							|  |  |  | 	// Target response gzip compressed body, scrape with body size limit.
 | 
					
						
							|  |  |  | 	gzipResponse = true | 
					
						
							|  |  |  | 	buf.Reset() | 
					
						
							|  |  |  | 	_, err = ts.scrape(context.Background(), &buf) | 
					
						
							|  |  |  | 	require.ErrorIs(t, err, errBodySizeLimit) | 
					
						
							|  |  |  | 	require.Equal(t, bodySizeLimit, buf.Len()) | 
					
						
							|  |  |  | 	// Target response uncompressed body, scrape without body size limit.
 | 
					
						
							|  |  |  | 	gzipResponse = false | 
					
						
							|  |  |  | 	buf.Reset() | 
					
						
							|  |  |  | 	ts.bodySizeLimit = 0 | 
					
						
							|  |  |  | 	_, err = ts.scrape(context.Background(), &buf) | 
					
						
							|  |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.Equal(t, len(responseBody), buf.Len()) | 
					
						
							|  |  |  | 	// Target response gzip compressed body, scrape without body size limit.
 | 
					
						
							|  |  |  | 	gzipResponse = true | 
					
						
							|  |  |  | 	buf.Reset() | 
					
						
							|  |  |  | 	_, err = ts.scrape(context.Background(), &buf) | 
					
						
							|  |  |  | 	require.NoError(t, err) | 
					
						
							|  |  |  | 	require.Equal(t, len(responseBody), buf.Len()) | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
// testScraper implements the scraper interface and allows setting values
// returned by its methods. It also allows setting a custom scrape function.
type testScraper struct {
	// offsetDur is returned verbatim by offset().
	offsetDur time.Duration

	// Arguments of the most recent Report() call, for test inspection.
	lastStart    time.Time
	lastDuration time.Duration
	lastError    error

	// scrapeErr is returned by scrape() when scrapeFunc is nil.
	scrapeErr  error
	// scrapeFunc, when non-nil, is invoked by scrape() instead.
	scrapeFunc func(context.Context, io.Writer) error
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-03-12 18:46:15 +08:00
// offset implements the scraper interface. Both arguments are ignored; the
// preconfigured offsetDur is returned unconditionally.
func (ts *testScraper) offset(interval time.Duration, jitterSeed uint64) time.Duration {
	return ts.offsetDur
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-11-12 05:42:24 +08:00
// Report records the arguments of the latest report into the last* fields so
// tests can assert on what was reported.
func (ts *testScraper) Report(start time.Time, duration time.Duration, err error) {
	ts.lastStart = start
	ts.lastDuration = duration
	ts.lastError = err
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-10-04 21:52:03 +08:00
										 |  |  | func (ts *testScraper) scrape(ctx context.Context, w io.Writer) (string, error) { | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	if ts.scrapeFunc != nil { | 
					
						
							| 
									
										
										
										
											2018-10-04 21:52:03 +08:00
										 |  |  | 		return "", ts.scrapeFunc(ctx, w) | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | 	} | 
					
						
							| 
									
										
										
										
											2018-10-04 21:52:03 +08:00
										 |  |  | 	return "", ts.scrapeErr | 
					
						
							| 
									
										
										
										
											2016-02-23 17:58:16 +08:00
										 |  |  | } | 
					
						
							| 
									
										
										
										
											2019-03-15 18:04:15 +08:00
										 |  |  | 
 | 
					
						
// TestScrapeLoop_RespectTimestamps appends an exposition line carrying an
// explicit timestamp (the trailing "0") and expects the stored sample to keep
// t=0 rather than the scrape time `now`.
func TestScrapeLoop_RespectTimestamps(t *testing.T) {
	s := teststorage.New(t)
	defer s.Close()

	app := s.Appender(context.Background())

	// capp records everything appended so it can be compared below.
	capp := &collectResultAppender{next: app}

	sl := newScrapeLoop(context.Background(),
		nil, nil, nil,
		nopMutator,
		nopMutator,
		func(ctx context.Context) storage.Appender { return capp },
		nil, 0,
		// NOTE(review): this positional `true` presumably enables honoring
		// exposed timestamps (honorTimestamps) — confirm against the
		// newScrapeLoop signature; the expected sample below keeps t=0.
		true,
		0,
		nil,
		0,
		0,
		false,
	)

	now := time.Now()
	slApp := sl.appender(context.Background())
	_, _, _, err := sl.append(slApp, []byte(`metric_a{a="1",b="1"} 1 0`), "", now)
	require.NoError(t, err)
	require.NoError(t, slApp.Commit())

	// The exposed timestamp 0 must win over `now`.
	want := []sample{
		{
			metric: labels.FromStrings("__name__", "metric_a", "a", "1", "b", "1"),
			t:      0,
			v:      1,
		},
	}
	require.Equal(t, want, capp.result, "Appended samples not as expected")
}
					
						
							|  |  |  | 
 | 
					
						
// TestScrapeLoop_DiscardTimestamps appends an exposition line carrying an
// explicit timestamp (the trailing "0") and expects the stored sample to use
// the scrape time `now` instead, i.e. the exposed timestamp is discarded.
func TestScrapeLoop_DiscardTimestamps(t *testing.T) {
	s := teststorage.New(t)
	defer s.Close()

	app := s.Appender(context.Background())

	// capp records everything appended so it can be compared below.
	capp := &collectResultAppender{next: app}

	sl := newScrapeLoop(context.Background(),
		nil, nil, nil,
		nopMutator,
		nopMutator,
		func(ctx context.Context) storage.Appender { return capp },
		nil, 0,
		// NOTE(review): this positional `false` presumably disables honoring
		// exposed timestamps (honorTimestamps) — confirm against the
		// newScrapeLoop signature; the expected sample below uses `now`.
		false,
		0,
		nil,
		0,
		0,
		false,
	)

	now := time.Now()
	slApp := sl.appender(context.Background())
	_, _, _, err := sl.append(slApp, []byte(`metric_a{a="1",b="1"} 1 0`), "", now)
	require.NoError(t, err)
	require.NoError(t, slApp.Commit())

	// The scrape time must win over the exposed timestamp 0.
	want := []sample{
		{
			metric: labels.FromStrings("__name__", "metric_a", "a", "1", "b", "1"),
			t:      timestamp.FromTime(now),
			v:      1,
		},
	}
	require.Equal(t, want, capp.result, "Appended samples not as expected")
}
					
						
							| 
									
										
										
										
											2020-01-20 19:05:27 +08:00
										 |  |  | 
 | 
					
						
// TestScrapeLoopDiscardDuplicateLabels verifies that a scrape batch
// containing a series with a repeated label name (le="600",le="700") is
// rejected as a whole — nothing from that batch reaches the TSDB — while a
// subsequent clean batch is stored normally.
func TestScrapeLoopDiscardDuplicateLabels(t *testing.T) {
	s := teststorage.New(t)
	defer s.Close()

	ctx, cancel := context.WithCancel(context.Background())
	sl := newScrapeLoop(ctx,
		&testScraper{},
		nil, nil,
		nopMutator,
		nopMutator,
		s.Appender,
		nil,
		0,
		true,
		0,
		nil,
		0,
		0,
		false,
	)
	defer cancel()

	// We add a good and a bad metric to check that both are discarded.
	slApp := sl.appender(ctx)
	_, _, _, err := sl.append(slApp, []byte("test_metric{le=\"500\"} 1\ntest_metric{le=\"600\",le=\"700\"} 1\n"), "", time.Time{})
	require.Error(t, err)
	// The whole appender transaction is rolled back on error.
	require.NoError(t, slApp.Rollback())

	// Nothing at all may have been persisted from the failed batch.
	q, err := s.Querier(ctx, time.Time{}.UnixNano(), 0)
	require.NoError(t, err)
	series := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*"))
	require.Equal(t, false, series.Next(), "series found in tsdb")
	require.NoError(t, series.Err())

	// We add a good metric to check that it is recorded.
	slApp = sl.appender(ctx)
	_, _, _, err = sl.append(slApp, []byte("test_metric{le=\"500\"} 1\n"), "", time.Time{})
	require.NoError(t, err)
	require.NoError(t, slApp.Commit())

	// Exactly the one good series must now be queryable.
	q, err = s.Querier(ctx, time.Time{}.UnixNano(), 0)
	require.NoError(t, err)
	series = q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "le", "500"))
	require.Equal(t, true, series.Next(), "series not found in tsdb")
	require.NoError(t, series.Err())
	require.Equal(t, false, series.Next(), "more than one series found in tsdb")
}
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-03-02 15:18:05 +08:00
										 |  |  | func TestScrapeLoopDiscardUnnamedMetrics(t *testing.T) { | 
					
						
							|  |  |  | 	s := teststorage.New(t) | 
					
						
							|  |  |  | 	defer s.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	app := s.Appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-03-02 15:18:05 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	ctx, cancel := context.WithCancel(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	sl := newScrapeLoop(context.Background(), | 
					
						
							| 
									
										
										
										
											2020-03-02 15:18:05 +08:00
										 |  |  | 		&testScraper{}, | 
					
						
							|  |  |  | 		nil, nil, | 
					
						
							|  |  |  | 		func(l labels.Labels) labels.Labels { | 
					
						
							|  |  |  | 			if l.Has("drop") { | 
					
						
							|  |  |  | 				return labels.Labels{} | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 			return l | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		nopMutator, | 
					
						
							| 
									
										
										
										
											2020-07-24 22:10:51 +08:00
										 |  |  | 		func(ctx context.Context) storage.Appender { return app }, | 
					
						
							| 
									
										
										
										
											2020-03-02 15:18:05 +08:00
										 |  |  | 		nil, | 
					
						
							|  |  |  | 		0, | 
					
						
							|  |  |  | 		true, | 
					
						
							| 
									
										
										
										
											2021-09-03 21:37:42 +08:00
										 |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 		nil, | 
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 		0, | 
					
						
							|  |  |  | 		0, | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		false, | 
					
						
							| 
									
										
										
										
											2020-03-02 15:18:05 +08:00
										 |  |  | 	) | 
					
						
							|  |  |  | 	defer cancel() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-30 19:11:13 +08:00
										 |  |  | 	slApp := sl.appender(context.Background()) | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 	_, _, _, err := sl.append(slApp, []byte("nok 1\nnok2{drop=\"drop\"} 1\n"), "", time.Time{}) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Error(t, err) | 
					
						
							|  |  |  | 	require.NoError(t, slApp.Rollback()) | 
					
						
							|  |  |  | 	require.Equal(t, errNameLabelMandatory, err) | 
					
						
							| 
									
										
										
										
											2020-03-02 15:18:05 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | 	q, err := s.Querier(ctx, time.Time{}.UnixNano(), 0) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							| 
									
										
										
										
											2020-06-10 00:57:31 +08:00
										 |  |  | 	series := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*")) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, false, series.Next(), "series found in tsdb") | 
					
						
							|  |  |  | 	require.NoError(t, series.Err()) | 
					
						
							| 
									
										
										
										
											2020-03-02 15:18:05 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | func TestReusableConfig(t *testing.T) { | 
					
						
							|  |  |  | 	variants := []*config.ScrapeConfig{ | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 			JobName:       "prometheus", | 
					
						
							|  |  |  | 			ScrapeTimeout: model.Duration(15 * time.Second), | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 			JobName:       "httpd", | 
					
						
							|  |  |  | 			ScrapeTimeout: model.Duration(15 * time.Second), | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 			JobName:       "prometheus", | 
					
						
							|  |  |  | 			ScrapeTimeout: model.Duration(5 * time.Second), | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 			JobName:     "prometheus", | 
					
						
							|  |  |  | 			MetricsPath: "/metrics", | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 			JobName:     "prometheus", | 
					
						
							|  |  |  | 			MetricsPath: "/metrics2", | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 			JobName:       "prometheus", | 
					
						
							|  |  |  | 			ScrapeTimeout: model.Duration(5 * time.Second), | 
					
						
							|  |  |  | 			MetricsPath:   "/metrics2", | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 			JobName:        "prometheus", | 
					
						
							|  |  |  | 			ScrapeInterval: model.Duration(5 * time.Second), | 
					
						
							|  |  |  | 			MetricsPath:    "/metrics2", | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 			JobName:        "prometheus", | 
					
						
							|  |  |  | 			ScrapeInterval: model.Duration(5 * time.Second), | 
					
						
							|  |  |  | 			SampleLimit:    1000, | 
					
						
							|  |  |  | 			MetricsPath:    "/metrics2", | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	match := [][]int{ | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{0, 2}, | 
					
						
							|  |  |  | 		{4, 5}, | 
					
						
							|  |  |  | 		{4, 6}, | 
					
						
							|  |  |  | 		{4, 7}, | 
					
						
							|  |  |  | 		{5, 6}, | 
					
						
							|  |  |  | 		{5, 7}, | 
					
						
							|  |  |  | 		{6, 7}, | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 	noMatch := [][]int{ | 
					
						
							| 
									
										
										
										
											2020-04-15 18:17:41 +08:00
										 |  |  | 		{1, 2}, | 
					
						
							|  |  |  | 		{0, 4}, | 
					
						
							|  |  |  | 		{3, 4}, | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	for i, m := range match { | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 		require.Equal(t, true, reusableCache(variants[m[0]], variants[m[1]]), "match test %d", i) | 
					
						
							|  |  |  | 		require.Equal(t, true, reusableCache(variants[m[1]], variants[m[0]]), "match test %d", i) | 
					
						
							|  |  |  | 		require.Equal(t, true, reusableCache(variants[m[1]], variants[m[1]]), "match test %d", i) | 
					
						
							|  |  |  | 		require.Equal(t, true, reusableCache(variants[m[0]], variants[m[0]]), "match test %d", i) | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 	for i, m := range noMatch { | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 		require.Equal(t, false, reusableCache(variants[m[0]], variants[m[1]]), "not match test %d", i) | 
					
						
							|  |  |  | 		require.Equal(t, false, reusableCache(variants[m[1]], variants[m[0]]), "not match test %d", i) | 
					
						
							| 
									
										
										
										
											2020-01-22 20:13:47 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
// TestReuseScrapeCache verifies that reloading a scrape pool keeps each
// target's scrape cache when the new config is compatible with the old one
// (so cache reuse is decided by reusableCache semantics) and replaces the
// cache otherwise. Cache identity is compared by pointer address.
func TestReuseScrapeCache(t *testing.T) {
	var (
		app = &nopAppendable{}
		cfg = &config.ScrapeConfig{
			JobName:        "Prometheus",
			ScrapeTimeout:  model.Duration(5 * time.Second),
			ScrapeInterval: model.Duration(5 * time.Second),
			MetricsPath:    "/metrics",
		}
		sp, _ = newScrapePool(cfg, app, 0, nil, false)
		t1    = &Target{
			discoveredLabels: labels.Labels{
				labels.Label{
					Name:  "labelNew",
					Value: "nameNew",
				},
			},
		}
		proxyURL, _ = url.Parse("http://localhost:2128")
	)
	defer sp.stop()
	sp.sync([]*Target{t1})

	// Each step reloads the pool with newConfig; keep states whether the
	// pre-reload cache must survive that reload.
	steps := []struct {
		keep      bool
		newConfig *config.ScrapeConfig
	}{
		{
			// Identical scrape settings: cache is reusable.
			keep: true,
			newConfig: &config.ScrapeConfig{
				JobName:        "Prometheus",
				ScrapeInterval: model.Duration(5 * time.Second),
				ScrapeTimeout:  model.Duration(5 * time.Second),
				MetricsPath:    "/metrics",
			},
		},
		{
			// Changed timeout and metrics path: cache must be dropped.
			keep: false,
			newConfig: &config.ScrapeConfig{
				JobName:        "Prometheus",
				ScrapeInterval: model.Duration(5 * time.Second),
				ScrapeTimeout:  model.Duration(15 * time.Second),
				MetricsPath:    "/metrics2",
			},
		},
		{
			// Only SampleLimit added on top of the previous step's config:
			// cache stays reusable.
			keep: true,
			newConfig: &config.ScrapeConfig{
				JobName:        "Prometheus",
				SampleLimit:    400,
				ScrapeInterval: model.Duration(5 * time.Second),
				ScrapeTimeout:  model.Duration(15 * time.Second),
				MetricsPath:    "/metrics2",
			},
		},
		{
			// HonorTimestamps flipped: cache must be dropped.
			keep: false,
			newConfig: &config.ScrapeConfig{
				JobName:         "Prometheus",
				HonorTimestamps: true,
				SampleLimit:     400,
				ScrapeInterval:  model.Duration(5 * time.Second),
				ScrapeTimeout:   model.Duration(15 * time.Second),
				MetricsPath:     "/metrics2",
			},
		},
		{
			// Only the HTTP client config (proxy) changed: cache stays.
			keep: true,
			newConfig: &config.ScrapeConfig{
				JobName:         "Prometheus",
				HonorTimestamps: true,
				SampleLimit:     400,
				HTTPClientConfig: config_util.HTTPClientConfig{
					ProxyURL: config_util.URL{URL: proxyURL},
				},
				ScrapeInterval: model.Duration(5 * time.Second),
				ScrapeTimeout:  model.Duration(15 * time.Second),
				MetricsPath:    "/metrics2",
			},
		},
		{
			// HonorLabels flipped: cache must be dropped.
			keep: false,
			newConfig: &config.ScrapeConfig{
				JobName:         "Prometheus",
				HonorTimestamps: true,
				HonorLabels:     true,
				SampleLimit:     400,
				ScrapeInterval:  model.Duration(5 * time.Second),
				ScrapeTimeout:   model.Duration(15 * time.Second),
				MetricsPath:     "/metrics2",
			},
		},
	}

	// cacheAddr snapshots the address of every loop's cache, keyed by target
	// fingerprint, so caches can be compared by identity across reloads.
	cacheAddr := func(sp *scrapePool) map[uint64]string {
		r := make(map[uint64]string)
		for fp, l := range sp.loops {
			r[fp] = fmt.Sprintf("%p", l.getCache())
		}
		return r
	}

	for i, s := range steps {
		initCacheAddr := cacheAddr(sp)
		sp.reload(s.newConfig)
		for fp, newCacheAddr := range cacheAddr(sp) {
			if s.keep {
				require.Equal(t, initCacheAddr[fp], newCacheAddr, "step %d: old cache and new cache are not the same", i)
			} else {
				require.NotEqual(t, initCacheAddr[fp], newCacheAddr, "step %d: old cache and new cache are the same", i)
			}
		}
		// Reloading the exact same config again must never invalidate the
		// cache, regardless of the step's keep flag.
		initCacheAddr = cacheAddr(sp)
		sp.reload(s.newConfig)
		for fp, newCacheAddr := range cacheAddr(sp) {
			require.Equal(t, initCacheAddr[fp], newCacheAddr, "step %d: reloading the exact config invalidates the cache", i)
		}
	}
}
					
						
							| 
									
										
										
										
											2020-03-17 05:52:02 +08:00
										 |  |  | 
 | 
					
						
// TestScrapeAddFast exercises the scrape loop's cached-ref fast path for
// appends: after deliberately invalidating the cached series refs, a second
// scrape of the same series must still append cleanly (the loop is expected
// to recover from the resulting 'not found' by re-resolving the series).
func TestScrapeAddFast(t *testing.T) {
	s := teststorage.New(t)
	defer s.Close()

	ctx, cancel := context.WithCancel(context.Background())
	// Positional args to newScrapeLoop; order matters and matches the
	// constructor's signature at this revision.
	sl := newScrapeLoop(ctx,
		&testScraper{},
		nil, nil,
		nopMutator,
		nopMutator,
		s.Appender,
		nil,
		0,
		true,
		0,
		nil,
		0,
		0,
		false,
	)
	defer cancel()

	// First scrape populates both the storage and the loop's series cache.
	slApp := sl.appender(ctx)
	_, _, _, err := sl.append(slApp, []byte("up 1\n"), "", time.Time{})
	require.NoError(t, err)
	require.NoError(t, slApp.Commit())

	// Poison the cache. There is just one entry, and one series in the
	// storage. Changing the ref will create a 'not found' error.
	for _, v := range sl.getCache().series {
		v.ref++
	}

	// Second scrape at a later timestamp must still succeed despite the
	// poisoned refs.
	slApp = sl.appender(ctx)
	_, _, _, err = sl.append(slApp, []byte("up 1\n"), "", time.Time{}.Add(time.Second))
	require.NoError(t, err)
	require.NoError(t, slApp.Commit())
}
					
						
							| 
									
										
										
										
											2020-03-21 00:43:26 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | func TestReuseCacheRace(t *testing.T) { | 
					
						
							|  |  |  | 	var ( | 
					
						
							|  |  |  | 		app = &nopAppendable{} | 
					
						
							|  |  |  | 		cfg = &config.ScrapeConfig{ | 
					
						
							|  |  |  | 			JobName:        "Prometheus", | 
					
						
							|  |  |  | 			ScrapeTimeout:  model.Duration(5 * time.Second), | 
					
						
							|  |  |  | 			ScrapeInterval: model.Duration(5 * time.Second), | 
					
						
							|  |  |  | 			MetricsPath:    "/metrics", | 
					
						
							|  |  |  | 		} | 
					
						
							| 
									
										
										
										
											2021-08-24 20:31:14 +08:00
										 |  |  | 		sp, _ = newScrapePool(cfg, app, 0, nil, false) | 
					
						
							| 
									
										
										
										
											2020-03-21 00:43:26 +08:00
										 |  |  | 		t1    = &Target{ | 
					
						
							|  |  |  | 			discoveredLabels: labels.Labels{ | 
					
						
							|  |  |  | 				labels.Label{ | 
					
						
							|  |  |  | 					Name:  "labelNew", | 
					
						
							|  |  |  | 					Value: "nameNew", | 
					
						
							|  |  |  | 				}, | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 	) | 
					
						
							| 
									
										
										
										
											2020-07-27 16:38:08 +08:00
										 |  |  | 	defer sp.stop() | 
					
						
							| 
									
										
										
										
											2020-03-21 00:43:26 +08:00
										 |  |  | 	sp.sync([]*Target{t1}) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	start := time.Now() | 
					
						
							|  |  |  | 	for i := uint(1); i > 0; i++ { | 
					
						
							|  |  |  | 		if time.Since(start) > 5*time.Second { | 
					
						
							|  |  |  | 			break | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 		sp.reload(&config.ScrapeConfig{ | 
					
						
							|  |  |  | 			JobName:        "Prometheus", | 
					
						
							|  |  |  | 			ScrapeTimeout:  model.Duration(1 * time.Millisecond), | 
					
						
							|  |  |  | 			ScrapeInterval: model.Duration(1 * time.Millisecond), | 
					
						
							|  |  |  | 			MetricsPath:    "/metrics", | 
					
						
							|  |  |  | 			SampleLimit:    i, | 
					
						
							|  |  |  | 		}) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							| 
									
										
										
										
											2020-05-26 22:14:55 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | func TestCheckAddError(t *testing.T) { | 
					
						
							|  |  |  | 	var appErrs appendErrors | 
					
						
							|  |  |  | 	sl := scrapeLoop{l: log.NewNopLogger()} | 
					
						
							|  |  |  | 	sl.checkAddError(nil, nil, nil, storage.ErrOutOfOrderSample, nil, &appErrs) | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.Equal(t, 1, appErrs.numOutOfOrder) | 
					
						
							| 
									
										
										
										
											2020-05-26 22:14:55 +08:00
										 |  |  | } | 
					
						
							| 
									
										
										
										
											2020-07-16 19:53:39 +08:00
										 |  |  | 
 | 
					
						
// TestScrapeReportSingleAppender runs a scrape loop against real storage and
// checks that metric samples and the synthetic report samples are committed
// together: at every observation point the total sample count must be a
// multiple of 9 (the per-scrape total here — 4 metrics plus the report
// series; presumably up, duration, samples_scraped, post-relabeling, and
// series_added — TODO confirm against the report-sample writer).
func TestScrapeReportSingleAppender(t *testing.T) {
	s := teststorage.New(t)
	defer s.Close()

	var (
		signal  = make(chan struct{}, 1)
		scraper = &testScraper{}
	)

	ctx, cancel := context.WithCancel(context.Background())
	// 10ms interval / 1h timeout; positional args match newScrapeLoop's
	// signature at this revision.
	sl := newScrapeLoop(ctx,
		scraper,
		nil, nil,
		nopMutator,
		nopMutator,
		s.Appender,
		nil,
		0,
		true,
		0,
		nil,
		10*time.Millisecond,
		time.Hour,
		false,
	)

	numScrapes := 0

	// Every fourth scrape fails, so both the success and the failure report
	// paths are exercised while the loop runs.
	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
		numScrapes++
		if numScrapes%4 == 0 {
			return fmt.Errorf("scrape failed")
		}
		w.Write([]byte("metric_a 44\nmetric_b 44\nmetric_c 44\nmetric_d 44\n"))
		return nil
	}

	go func() {
		sl.run(nil)
		signal <- struct{}{}
	}()

	// Repeatedly query everything appended so far; a count that is not a
	// multiple of 9 means a scrape's samples were committed partially.
	start := time.Now()
	for time.Since(start) < 3*time.Second {
		q, err := s.Querier(ctx, time.Time{}.UnixNano(), time.Now().UnixNano())
		require.NoError(t, err)
		series := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".+"))

		c := 0
		for series.Next() {
			i := series.At().Iterator()
			for i.Next() {
				c++
			}
		}

		require.Equal(t, 0, c%9, "Appended samples not as expected: %d", c)
		q.Close()
	}
	cancel()

	// The loop must exit promptly once the context is cancelled.
	select {
	case <-signal:
	case <-time.After(5 * time.Second):
		t.Fatalf("Scrape wasn't stopped.")
	}
}
					
						
							| 
									
										
										
										
											2021-05-06 16:56:21 +08:00
										 |  |  | 
 | 
					
						
// TestScrapeLoopLabelLimit checks that appends enforce the configured label
// limits — label count, label-name length, and label-value length — counting
// labels from both the scraped exposition and target discovery, and that
// compliant scrapes commit cleanly.
func TestScrapeLoopLabelLimit(t *testing.T) {
	tests := []struct {
		title           string
		scrapeLabels    string   // exposition-format input fed to append
		discoveryLabels []string // alternating name/value pairs attached via the target
		labelLimits     labelLimits
		expectErr       bool
	}{
		{
			title:           "Valid number of labels",
			scrapeLabels:    `metric{l1="1", l2="2"} 0`,
			discoveryLabels: nil,
			labelLimits:     labelLimits{labelLimit: 5},
			expectErr:       false,
		}, {
			title:           "Too many labels",
			scrapeLabels:    `metric{l1="1", l2="2", l3="3", l4="4", l5="5", l6="6"} 0`,
			discoveryLabels: nil,
			labelLimits:     labelLimits{labelLimit: 5},
			expectErr:       true,
		}, {
			title:           "Too many labels including discovery labels",
			scrapeLabels:    `metric{l1="1", l2="2", l3="3", l4="4"} 0`,
			discoveryLabels: []string{"l5", "5", "l6", "6"},
			labelLimits:     labelLimits{labelLimit: 5},
			expectErr:       true,
		}, {
			title:           "Valid labels name length",
			scrapeLabels:    `metric{l1="1", l2="2"} 0`,
			discoveryLabels: nil,
			labelLimits:     labelLimits{labelNameLengthLimit: 10},
			expectErr:       false,
		}, {
			title:           "Label name too long",
			scrapeLabels:    `metric{label_name_too_long="0"} 0`,
			discoveryLabels: nil,
			labelLimits:     labelLimits{labelNameLengthLimit: 10},
			expectErr:       true,
		}, {
			title:           "Discovery label name too long",
			scrapeLabels:    `metric{l1="1", l2="2"} 0`,
			discoveryLabels: []string{"label_name_too_long", "0"},
			labelLimits:     labelLimits{labelNameLengthLimit: 10},
			expectErr:       true,
		}, {
			title:           "Valid labels value length",
			scrapeLabels:    `metric{l1="1", l2="2"} 0`,
			discoveryLabels: nil,
			labelLimits:     labelLimits{labelValueLengthLimit: 10},
			expectErr:       false,
		}, {
			title:           "Label value too long",
			scrapeLabels:    `metric{l1="label_value_too_long"} 0`,
			discoveryLabels: nil,
			labelLimits:     labelLimits{labelValueLengthLimit: 10},
			expectErr:       true,
		}, {
			title:           "Discovery label value too long",
			scrapeLabels:    `metric{l1="1", l2="2"} 0`,
			discoveryLabels: []string{"l1", "label_value_too_long"},
			labelLimits:     labelLimits{labelValueLengthLimit: 10},
			expectErr:       true,
		},
	}

	for _, test := range tests {
		app := &collectResultAppender{}

		// Target carrying the discovery labels that the mutators merge into
		// every scraped sample.
		discoveryLabels := &Target{
			labels: labels.FromStrings(test.discoveryLabels...),
		}

		sl := newScrapeLoop(context.Background(),
			nil, nil, nil,
			func(l labels.Labels) labels.Labels {
				return mutateSampleLabels(l, discoveryLabels, false, nil)
			},
			func(l labels.Labels) labels.Labels {
				return mutateReportSampleLabels(l, discoveryLabels)
			},
			func(ctx context.Context) storage.Appender { return app },
			nil,
			0,
			true,
			0,
			&test.labelLimits,
			0,
			0,
			false,
		)

		slApp := sl.appender(context.Background())
		_, _, _, err := sl.append(slApp, []byte(test.scrapeLabels), "", time.Now())

		t.Logf("Test:%s", test.title)
		if test.expectErr {
			require.Error(t, err)
		} else {
			require.NoError(t, err)
			require.NoError(t, slApp.Commit())
		}
	}
}
					
						
							| 
									
										
										
										
											2021-08-31 23:37:32 +08:00
										 |  |  | 
 | 
					
						
// TestTargetScrapeIntervalAndTimeoutRelabel verifies that relabeling rules
// can rewrite the per-target __scrape_interval__ and __scrape_timeout__
// labels, and that the rewritten values show up on the pool's active targets
// after a Sync.
func TestTargetScrapeIntervalAndTimeoutRelabel(t *testing.T) {
	interval, _ := model.ParseDuration("2s")
	timeout, _ := model.ParseDuration("500ms")
	config := &config.ScrapeConfig{
		ScrapeInterval: interval,
		ScrapeTimeout:  timeout,
		RelabelConfigs: []*relabel.Config{
			{
				// Rewrite the scrape interval 2s -> 3s.
				SourceLabels: model.LabelNames{model.ScrapeIntervalLabel},
				Regex:        relabel.MustNewRegexp("2s"),
				Replacement:  "3s",
				TargetLabel:  model.ScrapeIntervalLabel,
				Action:       relabel.Replace,
			},
			{
				// Rewrite the scrape timeout 500ms -> 750ms.
				SourceLabels: model.LabelNames{model.ScrapeTimeoutLabel},
				Regex:        relabel.MustNewRegexp("500ms"),
				Replacement:  "750ms",
				TargetLabel:  model.ScrapeTimeoutLabel,
				Action:       relabel.Replace,
			},
		},
	}
	sp, _ := newScrapePool(config, &nopAppendable{}, 0, nil, false)
	tgts := []*targetgroup.Group{
		{
			Targets: []model.LabelSet{{model.AddressLabel: "127.0.0.1:9090"}},
		},
	}

	sp.Sync(tgts)
	defer sp.stop()

	require.Equal(t, "3s", sp.ActiveTargets()[0].labels.Get(model.ScrapeIntervalLabel))
	require.Equal(t, "750ms", sp.ActiveTargets()[0].labels.Get(model.ScrapeTimeoutLabel))
}