// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package tsdb

import (
	"context"
	"fmt"
	"io/ioutil"
	"math"
	"math/rand"
	"os"
	"path/filepath"
	"sort"
	"strconv"
	"testing"
	"time"

	"github.com/pkg/errors"
	"github.com/stretchr/testify/require"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/tsdb/chunkenc"
	"github.com/prometheus/prometheus/tsdb/chunks"
	"github.com/prometheus/prometheus/tsdb/index"
	"github.com/prometheus/prometheus/tsdb/tombstones"
	"github.com/prometheus/prometheus/tsdb/tsdbutil"
)

// TODO(bwplotka): Replace those mocks with remote.concreteSeriesSet.
type mockSeriesSet struct {
	next   func() bool
	series func() storage.Series
	ws     func() storage.Warnings
	err    func() error
}

func (m *mockSeriesSet) Next() bool                 { return m.next() }
func (m *mockSeriesSet) At() storage.Series         { return m.series() }
func (m *mockSeriesSet) Err() error                 { return m.err() }
func (m *mockSeriesSet) Warnings() storage.Warnings { return m.ws() }

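// newMockSeriesSet returns a mockSeriesSet that iterates the given series in
// order and reports neither an error nor warnings.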
func newMockSeriesSet(list []storage.Series) *mockSeriesSet {
	i := -1
	return &mockSeriesSet{
		next: func() bool {
			i++
			return i < len(list)
		},
		series: func() storage.Series {
			return list[i]
		},
		err: func() error { return nil },
		ws:  func() storage.Warnings { return nil },
	}
}

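// mockChunkSeriesSet is the chunk-based counterpart of mockSeriesSet.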
type mockChunkSeriesSet struct {
	next   func() bool
	series func() storage.ChunkSeries
	ws     func() storage.Warnings
	err    func() error
}

func (m *mockChunkSeriesSet) Next() bool                 { return m.next() }
func (m *mockChunkSeriesSet) At() storage.ChunkSeries    { return m.series() }
func (m *mockChunkSeriesSet) Err() error                 { return m.err() }
func (m *mockChunkSeriesSet) Warnings() storage.Warnings { return m.ws() }

func newMockChunkSeriesSet(list []storage.ChunkSeries) *mockChunkSeriesSet {
	i := -1
	return &mockChunkSeriesSet{
		next: func() bool {
			i++
			return i < len(list)
		},
		series: func() storage.ChunkSeries {
			return list[i]
		},
		err: func() error { return nil },
		ws:  func() storage.Warnings { return nil },
	}
}

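// seriesSamples describes a test series by its label set and its samples,
// pre-grouped into the chunks they should be stored in.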
type seriesSamples struct {
	lset   map[string]string
	chunks [][]sample
}

// Index: labels -> postings -> chunkMetas -> chunkRef
// ChunkReader: ref -> vals
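// createIdxChkReaders builds an in-memory IndexReader and ChunkReader from the
// given series and returns them together with the smallest and largest
// timestamp found across all chunks.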
func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkReader, int64, int64) {
	sort.Slice(tc, func(i, j int) bool {
		return labels.Compare(labels.FromMap(tc[i].lset), labels.FromMap(tc[j].lset)) < 0
	})

	postings := index.NewMemPostings()
	chkReader := mockChunkReader(make(map[chunks.ChunkRef]chunkenc.Chunk))
	lblIdx := make(map[string]map[string]struct{})
	mi := newMockIndex()
	blockMint := int64(math.MaxInt64)
	blockMaxt := int64(math.MinInt64)

	var chunkRef chunks.ChunkRef
	for i, s := range tc {
		i = i + 1 // 0 is not a valid posting.
		metas := make([]chunks.Meta, 0, len(s.chunks))
		for _, chk := range s.chunks {
			if chk[0].t < blockMint {
				blockMint = chk[0].t
			}
			if chk[len(chk)-1].t > blockMaxt {
				blockMaxt = chk[len(chk)-1].t
			}

			metas = append(metas, chunks.Meta{
				MinTime: chk[0].t,
				MaxTime: chk[len(chk)-1].t,
				Ref:     chunkRef,
			})

			chunk := chunkenc.NewXORChunk()
			app, _ := chunk.Appender()
			for _, smpl := range chk {
				app.Append(smpl.t, smpl.v)
			}
			chkReader[chunkRef] = chunk
			chunkRef++
		}
		ls := labels.FromMap(s.lset)
		require.NoError(t, mi.AddSeries(storage.SeriesRef(i), ls, metas...))

		postings.Add(storage.SeriesRef(i), ls)

		for _, l := range ls {
			vs, present := lblIdx[l.Name]
			if !present {
				vs = map[string]struct{}{}
				lblIdx[l.Name] = vs
			}
			vs[l.Value] = struct{}{}
		}
	}

	require.NoError(t, postings.Iter(func(l labels.Label, p index.Postings) error {
		return mi.WritePostings(l.Name, l.Value, p)
	}))
	return mi, chkReader, blockMint, blockMaxt
}

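// blockQuerierTestCase holds one query (time range, matchers and optional
// select hints) together with the series and chunk series it is expected to
// return.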
type blockQuerierTestCase struct {
	mint, maxt int64
	ms         []*labels.Matcher
	hints      *storage.SelectHints
	exp        storage.SeriesSet
	expChks    storage.ChunkSeriesSet
}

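// testBlockQuerier runs the case against both a sample-based blockQuerier and
// a chunk-based blockChunkQuerier built on the given readers and tombstones,
// comparing the results against the expected series and chunk series.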
func testBlockQuerier(t *testing.T, c blockQuerierTestCase, ir IndexReader, cr ChunkReader, stones *tombstones.MemTombstones) {
	t.Run("sample", func(t *testing.T) {
		q := blockQuerier{
			blockBaseQuerier: &blockBaseQuerier{
				index:      ir,
				chunks:     cr,
				tombstones: stones,

				mint: c.mint,
				maxt: c.maxt,
			},
		}

		res := q.Select(false, c.hints, c.ms...)
		defer func() { require.NoError(t, q.Close()) }()

		for {
			eok, rok := c.exp.Next(), res.Next()
			require.Equal(t, eok, rok)

			if !eok {
				require.Equal(t, 0, len(res.Warnings()))
				break
			}
			sexp := c.exp.At()
			sres := res.At()
			require.Equal(t, sexp.Labels(), sres.Labels())

			smplExp, errExp := storage.ExpandSamples(sexp.Iterator(), nil)
			smplRes, errRes := storage.ExpandSamples(sres.Iterator(), nil)

			require.Equal(t, errExp, errRes)
			require.Equal(t, smplExp, smplRes)
		}
		require.NoError(t, res.Err())
	})

	t.Run("chunk", func(t *testing.T) {
		q := blockChunkQuerier{
			blockBaseQuerier: &blockBaseQuerier{
				index:      ir,
				chunks:     cr,
				tombstones: stones,

				mint: c.mint,
				maxt: c.maxt,
			},
		}
		res := q.Select(false, c.hints, c.ms...)
		defer func() { require.NoError(t, q.Close()) }()

		for {
			eok, rok := c.expChks.Next(), res.Next()
			require.Equal(t, eok, rok)

			if !eok {
				require.Equal(t, 0, len(res.Warnings()))
				break
			}
			sexpChks := c.expChks.At()
			sres := res.At()

			require.Equal(t, sexpChks.Labels(), sres.Labels())

			chksExp, errExp := storage.ExpandChunks(sexpChks.Iterator())
			rmChunkRefs(chksExp)
			chksRes, errRes := storage.ExpandChunks(sres.Iterator())
			rmChunkRefs(chksRes)
			require.Equal(t, errExp, errRes)
			require.Equal(t, chksExp, chksRes)
		}
		require.NoError(t, res.Err())
	})
}

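// TestBlockQuerier queries the in-memory index and chunk readers built from
// testData, both with default trimming and with trimming disabled via hints.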
func TestBlockQuerier(t *testing.T) {
	for _, c := range []blockQuerierTestCase{
		{
			mint:    0,
			maxt:    0,
			ms:      []*labels.Matcher{},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    0,
			maxt:    0,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    1,
			maxt:    0,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    math.MinInt64,
			maxt:    math.MaxInt64,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "x")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint: math.MinInt64,
			maxt: math.MaxInt64,
			ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
			exp: newMockSeriesSet([]storage.Series{
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}},
				),
				storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}, sample{5, 1}, sample{6, 7}, sample{7, 2}},
				),
			}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}}, []tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}, []tsdbutil.Sample{sample{5, 3}, sample{6, 6}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}}, []tsdbutil.Sample{sample{5, 1}, sample{6, 7}, sample{7, 2}},
				),
			}),
		},
		{
			mint: 2,
			maxt: 6,
			ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp: newMockSeriesSet([]storage.Series{
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}},
				),
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}},
				),
			}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}}, []tsdbutil.Sample{sample{5, 2}, sample{6, 3}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}}, []tsdbutil.Sample{sample{5, 3}, sample{6, 6}},
				),
			}),
		},
		{
			// This test runs a query disabling trimming. All chunks containing at least 1 sample within the queried
			// time range will be returned.
			mint:  2,
			maxt:  6,
			hints: &storage.SelectHints{Start: 2, End: 6, DisableTrimming: true},
			ms:    []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp: newMockSeriesSet([]storage.Series{
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}},
				),
			}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}},
					[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}},
					[]tsdbutil.Sample{sample{5, 3}, sample{6, 6}},
				),
			}),
		},
		{
			// This test runs a query disabling trimming. All chunks containing at least 1 sample within the queried
			// time range will be returned.
			mint:  5,
			maxt:  6,
			hints: &storage.SelectHints{Start: 5, End: 6, DisableTrimming: true},
			ms:    []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp: newMockSeriesSet([]storage.Series{
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{5, 3}, sample{6, 6}},
				),
			}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{5, 3}, sample{6, 6}},
				),
			}),
		},
	} {
		t.Run("", func(t *testing.T) {
			ir, cr, _, _ := createIdxChkReaders(t, testData)
			testBlockQuerier(t, c, ir, cr, tombstones.NewMemTombstones())
		})
	}
}

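// TestBlockQuerier_AgainstHeadWithOpenChunks runs similar cases against a
// RangeHead built from an in-memory Head whose chunks are still open, so each
// series is expected to come back as a single chunk.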
func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) {
	for _, c := range []blockQuerierTestCase{
		{
			mint:    0,
			maxt:    0,
			ms:      []*labels.Matcher{},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    0,
			maxt:    0,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    1,
			maxt:    0,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    math.MinInt64,
			maxt:    math.MaxInt64,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "x")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint: math.MinInt64,
			maxt: math.MaxInt64,
			ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
			exp: newMockSeriesSet([]storage.Series{
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}},
				),
				storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}, sample{5, 1}, sample{6, 7}, sample{7, 2}},
				),
			}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}, sample{5, 1}, sample{6, 7}, sample{7, 2}},
				),
			}),
		},
		{
			mint: 2,
			maxt: 6,
			ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp: newMockSeriesSet([]storage.Series{
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}},
				),
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}},
				),
			}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}},
				),
			}),
		},
	} {
		t.Run("", func(t *testing.T) {
			opts := DefaultHeadOptions()
			opts.ChunkRange = 2 * time.Hour.Milliseconds()
			h, err := NewHead(nil, nil, nil, opts, nil)
			require.NoError(t, err)
			defer h.Close()

			app := h.Appender(context.Background())
			for _, s := range testData {
				for _, chk := range s.chunks {
					for _, sample := range chk {
						_, err = app.Append(0, labels.FromMap(s.lset), sample.t, sample.v)
						require.NoError(t, err)
					}
				}
			}
			require.NoError(t, app.Commit())

			hr := NewRangeHead(h, c.mint, c.maxt)
			ir, err := hr.Index()
			require.NoError(t, err)
			defer ir.Close()

			cr, err := hr.Chunks()
			require.NoError(t, err)
			defer cr.Close()

			testBlockQuerier(t, c, ir, cr, tombstones.NewMemTombstones())
		})
	}
}

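// testData is the fixture shared by the block querier tests: three series,
// each split into two chunks.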
var testData = []seriesSamples{
	{
		lset: map[string]string{"a": "a"},
		chunks: [][]sample{
			{{1, 2}, {2, 3}, {3, 4}},
			{{5, 2}, {6, 3}, {7, 4}},
		},
	},
	{
		lset: map[string]string{"a": "a", "b": "b"},
		chunks: [][]sample{
			{{1, 1}, {2, 2}, {3, 3}},
			{{5, 3}, {6, 6}},
		},
	},
	{
		lset: map[string]string{"b": "b"},
		chunks: [][]sample{
			{{1, 3}, {2, 2}, {3, 6}},
			{{5, 1}, {6, 7}, {7, 2}},
		},
	},
}

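// TestBlockQuerierDelete checks that samples and chunks covered by tombstone
// intervals are dropped from the query results.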
func TestBlockQuerierDelete(t *testing.T) {
	stones := tombstones.NewTestMemTombstones([]tombstones.Intervals{
		{{Mint: 1, Maxt: 3}},
		{{Mint: 1, Maxt: 3}, {Mint: 6, Maxt: 10}},
		{{Mint: 6, Maxt: 10}},
	})

	for _, c := range []blockQuerierTestCase{
		{
			mint:    0,
			maxt:    0,
			ms:      []*labels.Matcher{},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    0,
			maxt:    0,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    1,
			maxt:    0,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint:    math.MinInt64,
			maxt:    math.MaxInt64,
			ms:      []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "x")},
			exp:     newMockSeriesSet([]storage.Series{}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{}),
		},
		{
			mint: math.MinInt64,
			maxt: math.MaxInt64,
			ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
			exp: newMockSeriesSet([]storage.Series{
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{5, 3}},
				),
				storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}, sample{5, 1}},
				),
			}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{5, 3}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}}, []tsdbutil.Sample{sample{5, 1}},
				),
			}),
		},
		{
			mint: 2,
			maxt: 6,
			ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
			exp: newMockSeriesSet([]storage.Series{
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}},
				),
				storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{5, 3}},
				),
			}),
			expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
					[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}},
				),
				storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
					[]tsdbutil.Sample{sample{5, 3}},
				),
			}),
		},
	} {
		t.Run("", func(t *testing.T) {
			ir, cr, _, _ := createIdxChkReaders(t, testData)
			testBlockQuerier(t, c, ir, cr, stones)
		})
	}
}

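// fakeChunksReader is a ChunkReader serving chunks from an in-memory map,
// keyed by chunk reference.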
type fakeChunksReader struct {
	ChunkReader
	chks map[chunks.ChunkRef]chunkenc.Chunk
}

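// createFakeReaderAndNotPopulatedChunks stores each sample slice as one chunk
// in a fakeChunksReader and returns matching chunk metas whose Chunk fields
// are intentionally left unpopulated.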
 | 
					
						
							| 
									
										
										
										
											2020-07-31 23:03:02 +08:00
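// createFakeReaderAndNotPopulatedChunks builds a fakeChunksReader from the given
// sample slices and returns chunk metas whose Chunk field is intentionally left
// unpopulated, so the iterators under test must fetch the data through the reader.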
func createFakeReaderAndNotPopulatedChunks(s ...[]tsdbutil.Sample) (*fakeChunksReader, []chunks.Meta) {
	f := &fakeChunksReader{
		chks: map[chunks.ChunkRef]chunkenc.Chunk{},
	}
	chks := make([]chunks.Meta, 0, len(s))

	for ref, samples := range s {
		chk := tsdbutil.ChunkFromSamples(samples)
		f.chks[chunks.ChunkRef(ref)] = chk.Chunk

		chks = append(chks, chunks.Meta{
			Ref:     chunks.ChunkRef(ref),
			MinTime: chk.MinTime,
			MaxTime: chk.MaxTime,
		})
	}
	return f, chks
}

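// Chunk returns the chunk stored under ref, or an error if no chunk is
// registered for that reference.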
func (r *fakeChunksReader) Chunk(ref chunks.ChunkRef) (chunkenc.Chunk, error) {
	chk, ok := r.chks[ref]
	if !ok {
		return nil, errors.Errorf("chunk not found at ref %v", ref)
	}
	return chk, nil
}

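// TestPopulateWithTombSeriesIterators exercises the populate-with-deletion
// iterators over table-driven cases covering empty chunks, full chunks, seeks,
// and tombstone-trimmed ranges, for both the sample and the chunk iterator.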
func TestPopulateWithTombSeriesIterators(t *testing.T) {
	cases := []struct {
		name string
		chks [][]tsdbutil.Sample

		expected     []tsdbutil.Sample
		expectedChks []chunks.Meta

		intervals tombstones.Intervals

		// Seek being zero means do not test seek.
		seek        int64
		seekSuccess bool
	}{
		{
			name: "no chunk",
			chks: [][]tsdbutil.Sample{},
		},
		{
			name: "one empty chunk", // This should never happen.
			chks: [][]tsdbutil.Sample{{}},

			expectedChks: []chunks.Meta{
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
			},
		},
		{
			name: "three empty chunks", // This should never happen.
			chks: [][]tsdbutil.Sample{{}, {}, {}},

			expectedChks: []chunks.Meta{
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
			},
		},
		{
			name: "one chunk",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}},
			},

			expected: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1},
			},
			expectedChks: []chunks.Meta{
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1},
				}),
			},
		},
		{
			name: "two full chunks",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
			},

			expected: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8},
			},
			expectedChks: []chunks.Meta{
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1},
				}),
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{7, 89}, sample{9, 8},
				}),
			},
		},
		{
			name: "three full chunks",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
				{sample{10, 22}, sample{203, 3493}},
			},

			expected: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8}, sample{10, 22}, sample{203, 3493},
			},
			expectedChks: []chunks.Meta{
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1},
				}),
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{7, 89}, sample{9, 8},
				}),
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{10, 22}, sample{203, 3493},
				}),
			},
		},
		// Seek cases.
		{
			name: "three empty chunks and seek", // This should never happen.
			chks: [][]tsdbutil.Sample{{}, {}, {}},
			seek: 1,

			seekSuccess: false,
		},
		{
			name: "two chunks and seek beyond chunks",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
			},
			seek: 10,

			seekSuccess: false,
		},
		{
			name: "two chunks and seek on middle of first chunk",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
			},
			seek: 2,

			seekSuccess: true,
			expected: []tsdbutil.Sample{
				sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8},
			},
		},
		{
			name: "two chunks and seek before first chunk",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
			},
			seek: -32,

			seekSuccess: true,
			expected: []tsdbutil.Sample{
				sample{1, 2}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8},
			},
		},
		// Deletion / Trim cases.
		{
			name:      "no chunk with deletion interval",
			chks:      [][]tsdbutil.Sample{},
			intervals: tombstones.Intervals{{Mint: 20, Maxt: 21}},
		},
		{
			name: "two chunks with trimmed first and last samples from edge chunks",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
			},
			intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}),

			expected: []tsdbutil.Sample{
				sample{3, 5}, sample{6, 1}, sample{7, 89},
			},
			expectedChks: []chunks.Meta{
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{3, 5}, sample{6, 1},
				}),
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{7, 89},
				}),
			},
		},
		{
			name: "two chunks with trimmed middle sample of first chunk",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
			},
			intervals: tombstones.Intervals{{Mint: 2, Maxt: 3}},

			expected: []tsdbutil.Sample{
				sample{1, 2}, sample{6, 1}, sample{7, 89}, sample{9, 8},
			},
			expectedChks: []chunks.Meta{
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{1, 2}, sample{6, 1},
				}),
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{7, 89}, sample{9, 8},
				}),
			},
		},
		{
			name: "two chunks with deletion across two chunks",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
			},
			intervals: tombstones.Intervals{{Mint: 6, Maxt: 7}},

			expected: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{9, 8},
			},
			expectedChks: []chunks.Meta{
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{1, 2}, sample{2, 3}, sample{3, 5},
				}),
				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
					sample{9, 8},
				}),
			},
		},
		// Deletion with seek.
		{
			name: "two chunks with trimmed first and last samples from edge chunks, seek from middle of first chunk",
			chks: [][]tsdbutil.Sample{
				{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}},
				{sample{7, 89}, sample{9, 8}},
			},
			intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}),

			seek:        3,
			seekSuccess: true,
			expected: []tsdbutil.Sample{
				sample{3, 5}, sample{6, 1}, sample{7, 89},
			},
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			t.Run("sample", func(t *testing.T) {
				f, chkMetas := createFakeReaderAndNotPopulatedChunks(tc.chks...)
				it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, tc.intervals).toSeriesIterator()

				var r []tsdbutil.Sample
				if tc.seek != 0 {
					require.Equal(t, tc.seekSuccess, it.Seek(tc.seek))
					require.Equal(t, tc.seekSuccess, it.Seek(tc.seek)) // Next one should be noop.

					if tc.seekSuccess {
						// After a successful seek the iterator is ready. Grab the value.
						t, v := it.At()
						r = append(r, sample{t: t, v: v})
					}
				}
				expandedResult, err := storage.ExpandSamples(it, newSample)
				require.NoError(t, err)
				r = append(r, expandedResult...)
				require.Equal(t, tc.expected, r)
			})
			t.Run("chunk", func(t *testing.T) {
				f, chkMetas := createFakeReaderAndNotPopulatedChunks(tc.chks...)
				it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, tc.intervals).toChunkSeriesIterator()

				if tc.seek != 0 {
					// The chunk iterator does not have a Seek method.
					return
				}
				expandedResult, err := storage.ExpandChunks(it)
				require.NoError(t, err)

				// We don't care about ref IDs for comparison; only the chunks' samples matter.
				rmChunkRefs(expandedResult)
				rmChunkRefs(tc.expectedChks)
				require.Equal(t, tc.expectedChks, expandedResult)
			})
		})
	}
}

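// rmChunkRefs zeroes the Ref of every chunk meta so that comparisons ignore
// chunk references and only look at the chunk data.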
func rmChunkRefs(chks []chunks.Meta) {
	for i := range chks {
		chks[i].Ref = 0
	}
}

// Regression for: https://github.com/prometheus/tsdb/pull/97
func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) {
	f, chkMetas := createFakeReaderAndNotPopulatedChunks(
		[]tsdbutil.Sample{},
		[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}},
		[]tsdbutil.Sample{sample{4, 4}, sample{5, 5}},
	)

	it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator()
	require.True(t, it.Seek(1))
	require.True(t, it.Seek(2))
	require.True(t, it.Seek(2))
	ts, v := it.At()
	require.Equal(t, int64(2), ts)
	require.Equal(t, float64(2), v)
}

// Regression test: seeked chunks were still found via binary search, and we always
// skipped to the end when seeking a value in the current chunk.
func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) {
	f, chkMetas := createFakeReaderAndNotPopulatedChunks(
		[]tsdbutil.Sample{},
		[]tsdbutil.Sample{sample{1, 2}, sample{3, 4}, sample{5, 6}, sample{7, 8}},
		[]tsdbutil.Sample{},
	)

	it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator()
	require.True(t, it.Next())
	ts, v := it.At()
	require.Equal(t, int64(1), ts)
	require.Equal(t, float64(2), v)

	require.True(t, it.Seek(4))
	ts, v = it.At()
	require.Equal(t, int64(5), ts)
	require.Equal(t, float64(6), v)
}

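// TestPopulateWithDelSeriesIterator_SeekWithMinTime checks Seek around the chunk
// boundaries: seeking past the last sample fails, while a subsequent seek inside
// the chunk's time range still succeeds.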
func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) {
	f, chkMetas := createFakeReaderAndNotPopulatedChunks(
		[]tsdbutil.Sample{sample{1, 6}, sample{5, 6}, sample{6, 8}},
	)

	it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator()
	require.Equal(t, false, it.Seek(7))
	require.Equal(t, true, it.Seek(3))
}

// Regression test for calling Next() with a time bounded to fit within two samples.
// Seek gets called and advances beyond the max time, which was simply accepted as a valid sample.
func TestPopulateWithDelSeriesIterator_NextWithMinTime(t *testing.T) {
	f, chkMetas := createFakeReaderAndNotPopulatedChunks(
		[]tsdbutil.Sample{sample{1, 6}, sample{5, 6}, sample{7, 8}},
	)

	it := newPopulateWithDelGenericSeriesIterator(
		f, chkMetas, tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 4, Maxt: math.MaxInt64}),
	).toSeriesIterator()
	require.Equal(t, false, it.Next())
}

// Test the cost of merging series sets for different numbers of merged sets and their sizes.
// The subsets are all equivalent, so this does not capture merging of partial or non-overlapping sets well.
// TODO(bwplotka): Merge with storage merged series set benchmark.
func BenchmarkMergedSeriesSet(b *testing.B) {
	sel := func(sets []storage.SeriesSet) storage.SeriesSet {
		return storage.NewMergeSeriesSet(sets, storage.ChainedSeriesMerge)
	}

	for _, k := range []int{
		100,
		1000,
		10000,
		20000,
	} {
		for _, j := range []int{1, 2, 4, 8, 16, 32} {
			b.Run(fmt.Sprintf("series=%d,blocks=%d", k, j), func(b *testing.B) {
				lbls, err := labels.ReadLabels(filepath.Join("testdata", "20kseries.json"), k)
				require.NoError(b, err)

				sort.Sort(labels.Slice(lbls))

				in := make([][]storage.Series, j)

				for _, l := range lbls {
					l2 := l
					for j := range in {
						in[j] = append(in[j], storage.NewListSeries(l2, nil))
					}
				}

				b.ResetTimer()

				for i := 0; i < b.N; i++ {
					var sets []storage.SeriesSet
					for _, s := range in {
						sets = append(sets, newMockSeriesSet(s))
					}
					ms := sel(sets)

					i := 0
					for ms.Next() {
						i++
					}
					require.NoError(b, ms.Err())
					require.Equal(b, len(lbls), i)
				}
			})
		}
	}
}

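// mockChunkReader is a trivial in-memory ChunkReader keyed by chunk reference.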
type mockChunkReader map[chunks.ChunkRef]chunkenc.Chunk

func (cr mockChunkReader) Chunk(id chunks.ChunkRef) (chunkenc.Chunk, error) {
	chk, ok := cr[id]
	if ok {
		return chk, nil
	}

	return nil, errors.New("Chunk with ref not found")
}

func (cr mockChunkReader) Close() error {
	return nil
}

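// TestDeletedIterator verifies that DeletedIterator skips exactly the samples
// covered by the given tombstone intervals and yields every remaining sample.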
func TestDeletedIterator(t *testing.T) {
	chk := chunkenc.NewXORChunk()
	app, err := chk.Appender()
	require.NoError(t, err)
	// Insert random stuff from (0, 1000).
	act := make([]sample, 1000)
	for i := 0; i < 1000; i++ {
		act[i].t = int64(i)
		act[i].v = rand.Float64()
		app.Append(act[i].t, act[i].v)
	}

	cases := []struct {
		r tombstones.Intervals
	}{
		{r: tombstones.Intervals{{Mint: 1, Maxt: 20}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 10}, {Mint: 12, Maxt: 20}, {Mint: 21, Maxt: 23}, {Mint: 25, Maxt: 30}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 10}, {Mint: 12, Maxt: 20}, {Mint: 20, Maxt: 30}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 10}, {Mint: 12, Maxt: 23}, {Mint: 25, Maxt: 30}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 23}, {Mint: 12, Maxt: 20}, {Mint: 25, Maxt: 30}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 23}, {Mint: 12, Maxt: 20}, {Mint: 25, Maxt: 3000}}},
		{r: tombstones.Intervals{{Mint: 0, Maxt: 2000}}},
		{r: tombstones.Intervals{{Mint: 500, Maxt: 2000}}},
		{r: tombstones.Intervals{{Mint: 0, Maxt: 200}}},
		{r: tombstones.Intervals{{Mint: 1000, Maxt: 20000}}},
	}

	for _, c := range cases {
		i := int64(-1)
		it := &DeletedIterator{Iter: chk.Iterator(nil), Intervals: c.r[:]}
		ranges := c.r[:]
		for it.Next() {
			i++
			for _, tr := range ranges {
				if tr.InBounds(i) {
					i = tr.Maxt + 1
					ranges = ranges[1:]
				}
			}

			require.Less(t, i, int64(1000))

			ts, v := it.At()
			require.Equal(t, act[i].t, ts)
			require.Equal(t, act[i].v, v)
		}
		// There has been an extra call to Next().
		i++
		for _, tr := range ranges {
			if tr.InBounds(i) {
				i = tr.Maxt + 1
				ranges = ranges[1:]
			}
		}

		require.GreaterOrEqual(t, i, int64(1000))
		require.NoError(t, it.Err())
	}
}

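// TestDeletedIterator_WithSeek verifies that Seek lands on the first sample at or
// after the seek time that is not covered by a deletion interval.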
func TestDeletedIterator_WithSeek(t *testing.T) {
	chk := chunkenc.NewXORChunk()
	app, err := chk.Appender()
	require.NoError(t, err)
	// Insert random stuff from (0, 1000).
	act := make([]sample, 1000)
	for i := 0; i < 1000; i++ {
		act[i].t = int64(i)
		act[i].v = float64(i)
		app.Append(act[i].t, act[i].v)
	}

	cases := []struct {
		r        tombstones.Intervals
		seek     int64
		ok       bool
		seekedTs int64
	}{
		{r: tombstones.Intervals{{Mint: 1, Maxt: 20}}, seek: 1, ok: true, seekedTs: 21},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 20}}, seek: 20, ok: true, seekedTs: 21},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 20}}, seek: 10, ok: true, seekedTs: 21},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 20}}, seek: 999, ok: true, seekedTs: 999},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 20}}, seek: 1000, ok: false},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 23}, {Mint: 24, Maxt: 40}, {Mint: 45, Maxt: 3000}}, seek: 1, ok: true, seekedTs: 41},
		{r: tombstones.Intervals{{Mint: 5, Maxt: 23}, {Mint: 24, Maxt: 40}, {Mint: 41, Maxt: 3000}}, seek: 5, ok: false},
		{r: tombstones.Intervals{{Mint: 0, Maxt: 2000}}, seek: 10, ok: false},
		{r: tombstones.Intervals{{Mint: 500, Maxt: 2000}}, seek: 10, ok: true, seekedTs: 10},
		{r: tombstones.Intervals{{Mint: 500, Maxt: 2000}}, seek: 501, ok: false},
	}

	for _, c := range cases {
		it := &DeletedIterator{Iter: chk.Iterator(nil), Intervals: c.r[:]}

		require.Equal(t, c.ok, it.Seek(c.seek))
		if c.ok {
			ts, _ := it.At()
			require.Equal(t, c.seekedTs, ts)
		}
	}
}

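// series is a labelled series together with its chunk metadata, as stored by mockIndex.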
type series struct {
	l      labels.Labels
	chunks []chunks.Meta
}

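// mockIndex is a simple in-memory index used by the querier tests in place of a
// real block index.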
type mockIndex struct {
	series   map[storage.SeriesRef]series
	postings map[labels.Label][]storage.SeriesRef
	symbols  map[string]struct{}
}

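// newMockIndex returns an empty mockIndex with all of its maps initialized.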
func newMockIndex() mockIndex {
	ix := mockIndex{
		series:   make(map[storage.SeriesRef]series),
		postings: make(map[labels.Label][]storage.SeriesRef),
		symbols:  make(map[string]struct{}),
	}
	return ix
}

												Stream symbols during compaction. (#6468)
Rather than buffer up symbols in RAM, do it one by one
during compaction. Then use the reader's symbol handling
for symbol lookups during the rest of the index write.
There is some slowdown in compaction, due to having to look through a file
rather than a hash lookup. This is noise to the overall cost of compacting
series with thousands of samples though.
benchmark                                                                                   old ns/op       new ns/op       delta
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=101-4        539917175       675341565       +25.08%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=1001-4       2441815993      2477453524      +1.46%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=2001-4       3978543559      3922909687      -1.40%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=5001-4       8430219716      8586610007      +1.86%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=101-4      1786424591      1909552782      +6.89%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=1001-4     5328998202      6020839950      +12.98%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=2001-4     10085059958     11085278690     +9.92%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=5001-4     25497010155     27018079806     +5.97%
BenchmarkCompactionFromHead/labelnames=1,labelvalues=100000-4                               2427391406      2817217987      +16.06%
BenchmarkCompactionFromHead/labelnames=10,labelvalues=10000-4                               2592965497      2538805050      -2.09%
BenchmarkCompactionFromHead/labelnames=100,labelvalues=1000-4                               2437388343      2668012858      +9.46%
BenchmarkCompactionFromHead/labelnames=1000,labelvalues=100-4                               2317095324      2787423966      +20.30%
BenchmarkCompactionFromHead/labelnames=10000,labelvalues=10-4                               2600239857      2096973860      -19.35%
benchmark                                                                                   old allocs     new allocs     delta
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=101-4        500851         470794         -6.00%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=1001-4       821527         791451         -3.66%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=2001-4       1141562        1111508        -2.63%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=5001-4       2141576        2111504        -1.40%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=101-4      871466         841424         -3.45%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=1001-4     1941428        1911415        -1.55%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=2001-4     3071573        3041510        -0.98%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=5001-4     6771648        6741509        -0.45%
BenchmarkCompactionFromHead/labelnames=1,labelvalues=100000-4                               731493         824888         +12.77%
BenchmarkCompactionFromHead/labelnames=10,labelvalues=10000-4                               793918         887311         +11.76%
BenchmarkCompactionFromHead/labelnames=100,labelvalues=1000-4                               811842         905204         +11.50%
BenchmarkCompactionFromHead/labelnames=1000,labelvalues=100-4                               832244         925081         +11.16%
BenchmarkCompactionFromHead/labelnames=10000,labelvalues=10-4                               921553         1019162        +10.59%
benchmark                                                                                   old bytes      new bytes      delta
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=101-4        40532648       35698276       -11.93%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=1001-4       60340216       53409568       -11.49%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=2001-4       81087336       72065552       -11.13%
BenchmarkCompaction/type=normal,blocks=4,series=10000,samplesPerSeriesPerBlock=5001-4       142485576      120878544      -15.16%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=101-4      208661368      203831136      -2.31%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=1001-4     347345904      340484696      -1.98%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=2001-4     585185856      576244648      -1.53%
BenchmarkCompaction/type=vertical,blocks=4,series=10000,samplesPerSeriesPerBlock=5001-4     1357641792     1358966528     +0.10%
BenchmarkCompactionFromHead/labelnames=1,labelvalues=100000-4                               126486664      119666744      -5.39%
BenchmarkCompactionFromHead/labelnames=10,labelvalues=10000-4                               122323192      115117224      -5.89%
BenchmarkCompactionFromHead/labelnames=100,labelvalues=1000-4                               126404504      119469864      -5.49%
BenchmarkCompactionFromHead/labelnames=1000,labelvalues=100-4                               119047832      112230408      -5.73%
BenchmarkCompactionFromHead/labelnames=10000,labelvalues=10-4                               136576016      116634800      -14.60%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-12-18 03:49:54 +08:00
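
// Illustrative sketch, not part of the original file: the blame note above is
// about streaming symbols one at a time rather than materialising them all.
// index.StringIter (the type returned by Symbols below) is the streaming
// interface; draining it looks like this. The helper name is hypothetical.
func forEachSymbol(it index.StringIter, fn func(string)) error {
	for it.Next() {
		fn(it.At())
	}
	return it.Err()
}
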
func (m mockIndex) Symbols() index.StringIter {
	l := []string{}
	for s := range m.symbols {
		l = append(l, s)
	}
	sort.Strings(l)
	return index.NewStringListIter(l)
}

func (m *mockIndex) AddSeries(ref storage.SeriesRef, l labels.Labels, chunks ...chunks.Meta) error {
	if _, ok := m.series[ref]; ok {
		return errors.Errorf("series with reference %d already added", ref)
	}
	for _, lbl := range l {
		m.symbols[lbl.Name] = struct{}{}
		m.symbols[lbl.Value] = struct{}{}
	}

	s := series{l: l}
	// Actual chunk data is not stored in the index.
	for _, c := range chunks {
		c.Chunk = nil
		s.chunks = append(s.chunks, c)
	}
	m.series[ref] = s

	return nil
}

func (m mockIndex) WritePostings(name, value string, it index.Postings) error {
	l := labels.Label{Name: name, Value: value}
	if _, ok := m.postings[l]; ok {
		return errors.Errorf("postings for %s already added", l)
	}
	ep, err := index.ExpandPostings(it)
	if err != nil {
		return err
	}
	m.postings[l] = ep
	return nil
}

func (m mockIndex) Close() error {
	return nil
}

func (m mockIndex) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {
	values, _ := m.LabelValues(name, matchers...)
	sort.Strings(values)
	return values, nil
}

func (m mockIndex) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {
	var values []string

	if len(matchers) == 0 {
		for l := range m.postings {
			if l.Name == name {
				values = append(values, l.Value)
			}
		}
		return values, nil
	}

	for _, series := range m.series {
		for _, matcher := range matchers {
			if matcher.Matches(series.l.Get(matcher.Name)) {
				// TODO(colega): shouldn't we check all the matchers before adding this to the values?
				values = append(values, series.l.Get(name))
			}
		}
	}

	// Blame note ("Replace StringTuples with []string"): benchmarks showed
	// slight CPU and allocation improvements from returning plain []string.
	return values, nil
}

func (m mockIndex) LabelValueFor(id storage.SeriesRef, label string) (string, error) {
	return m.series[id].l.Get(label), nil
}

func (m mockIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) {
	namesMap := make(map[string]bool)
	for _, id := range ids {
		for _, lbl := range m.series[id].l {
			namesMap[lbl.Name] = true
		}
	}
	names := make([]string, 0, len(namesMap))
	for name := range namesMap {
		names = append(names, name)
	}
	return names, nil
}

// Blame note ("Reduce memory used by postings offset table."): only every 32nd
// posting-list offset is kept and lookups scan forward from the nearest kept
// entry, with a bulk interface for fetching postings to avoid re-reading the
// offset table; in the referenced benchmark the in-memory table shrank by
// roughly 48x.
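
// Illustrative sketch, not part of the original file: the sampling scheme the
// blame note above describes. Instead of holding an offset for every label
// value, only every 32nd entry of the sorted offset table is kept and a lookup
// scans forward from the nearest kept entry. The type and method names are
// hypothetical simplifications, not the real postings offset table.
type sampledOffsets struct {
	values  []string // every 32nd label value, in sorted order
	offsets []int64  // file offset of the postings list for each kept value
}

// startOffset returns the offset at which a forward scan for value v should begin.
func (s sampledOffsets) startOffset(v string) int64 {
	if len(s.values) == 0 {
		return 0
	}
	// Find the first kept value >= v, then step back to the last one <= v.
	i := sort.SearchStrings(s.values, v)
	if i == len(s.values) || (s.values[i] != v && i > 0) {
		i--
	}
	return s.offsets[i]
}
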
func (m mockIndex) Postings(name string, values ...string) (index.Postings, error) {
	res := make([]index.Postings, 0, len(values))
	for _, value := range values {
		l := labels.Label{Name: name, Value: value}
		res = append(res, index.NewListPostings(m.postings[l]))
	}
	return index.Merge(res...), nil
}

func (m mockIndex) SortedPostings(p index.Postings) index.Postings {
	ep, err := index.ExpandPostings(p)
	if err != nil {
		return index.ErrPostings(errors.Wrap(err, "expand postings"))
	}

	sort.Slice(ep, func(i, j int) bool {
		return labels.Compare(m.series[ep[i]].l, m.series[ep[j]].l) < 0
	})
	return index.NewListPostings(ep)
}

func (m mockIndex) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
	s, ok := m.series[ref]
	if !ok {
		return storage.ErrNotFound
	}
	*lset = append((*lset)[:0], s.l...)
	*chks = append((*chks)[:0], s.chunks...)

	return nil
}

func (m mockIndex) LabelNames(matchers ...*labels.Matcher) ([]string, error) {
	names := map[string]struct{}{}
	if len(matchers) == 0 {
		for l := range m.postings {
			names[l.Name] = struct{}{}
		}
	} else {
		for _, series := range m.series {
			matches := true
			for _, matcher := range matchers {
				matches = matches || matcher.Matches(series.l.Get(matcher.Name))
				if !matches {
					break
				}
			}
			if matches {
				for _, lbl := range series.l {
					names[lbl.Name] = struct{}{}
				}
			}
		}
	}
	l := make([]string, 0, len(names))
	for name := range names {
		l = append(l, name)
	}
	sort.Strings(l)
	return l, nil
}

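// Illustrative sketch, not part of the original file: how the mockIndex
// methods above fit together in a test. The helper name exerciseMockIndex and
// the sample label values are hypothetical, and it assumes the index was built
// with its maps initialised (as in the constructor above); everything else
// uses only the methods defined in this file.
func exerciseMockIndex(t *testing.T, ix *mockIndex) {
	lbls := labels.FromStrings("__name__", "metric", "job", "test")

	// Register the series and a postings entry that points at it.
	require.NoError(t, ix.AddSeries(1, lbls))
	require.NoError(t, ix.WritePostings("job", "test", index.NewListPostings([]storage.SeriesRef{1})))

	// Look the series back up through Postings and Series.
	p, err := ix.Postings("job", "test")
	require.NoError(t, err)
	var (
		got  labels.Labels
		chks []chunks.Meta
	)
	for p.Next() {
		require.NoError(t, ix.Series(p.At(), &got, &chks))
		require.Equal(t, lbls, got)
	}
	require.NoError(t, p.Err())
}
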
func BenchmarkQueryIterator(b *testing.B) {
	cases := []struct {
		numBlocks                   int
		numSeries                   int
		numSamplesPerSeriesPerBlock int
		overlapPercentages          []int // >=0, <=100, this is w.r.t. the previous block.
	}{
		{
			numBlocks:                   20,
			numSeries:                   1000,
			numSamplesPerSeriesPerBlock: 20000,
			overlapPercentages:          []int{0, 10, 30},
		},
	}

	for _, c := range cases {
		for _, overlapPercentage := range c.overlapPercentages {
			benchMsg := fmt.Sprintf("nBlocks=%d,nSeries=%d,numSamplesPerSeriesPerBlock=%d,overlap=%d%%",
				c.numBlocks, c.numSeries, c.numSamplesPerSeriesPerBlock, overlapPercentage)

			b.Run(benchMsg, func(b *testing.B) {
				dir, err := ioutil.TempDir("", "bench_query_iterator")
				require.NoError(b, err)
				defer func() {
					require.NoError(b, os.RemoveAll(dir))
				}()

				var (
					blocks          []*Block
					overlapDelta    = int64(overlapPercentage * c.numSamplesPerSeriesPerBlock / 100)
					prefilledLabels []map[string]string
					generatedSeries []storage.Series
				)
				for i := int64(0); i < int64(c.numBlocks); i++ {
					offset := i * overlapDelta
					mint := i*int64(c.numSamplesPerSeriesPerBlock) - offset
					maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
					if len(prefilledLabels) == 0 {
						generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
						for _, s := range generatedSeries {
							prefilledLabels = append(prefilledLabels, s.Labels().Map())
						}
					} else {
						generatedSeries = populateSeries(prefilledLabels, mint, maxt)
					}
					block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
					require.NoError(b, err)
					blocks = append(blocks, block)
					defer block.Close()
				}

				qblocks := make([]storage.Querier, 0, len(blocks))
				for _, blk := range blocks {
					q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
					require.NoError(b, err)
					qblocks = append(qblocks, q)
				}

				sq := storage.NewMergeQuerier(qblocks, nil, storage.ChainedSeriesMerge)
				defer sq.Close()

				benchQuery(b, c.numSeries, sq, labels.Selector{labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*")})
			})
		}
	}
}

func BenchmarkQuerySeek(b *testing.B) {
	cases := []struct {
		numBlocks                   int
		numSeries                   int
		numSamplesPerSeriesPerBlock int
		overlapPercentages          []int // >=0, <=100, this is w.r.t. the previous block.
	}{
		{
			numBlocks:                   20,
			numSeries:                   100,
			numSamplesPerSeriesPerBlock: 2000,
			overlapPercentages:          []int{0, 10, 30, 50},
		},
	}

	for _, c := range cases {
		for _, overlapPercentage := range c.overlapPercentages {
			benchMsg := fmt.Sprintf("nBlocks=%d,nSeries=%d,numSamplesPerSeriesPerBlock=%d,overlap=%d%%",
				c.numBlocks, c.numSeries, c.numSamplesPerSeriesPerBlock, overlapPercentage)

			b.Run(benchMsg, func(b *testing.B) {
				dir, err := ioutil.TempDir("", "bench_query_iterator")
				require.NoError(b, err)
				defer func() {
					require.NoError(b, os.RemoveAll(dir))
				}()

				var (
					blocks          []*Block
					overlapDelta    = int64(overlapPercentage * c.numSamplesPerSeriesPerBlock / 100)
					prefilledLabels []map[string]string
					generatedSeries []storage.Series
				)
				for i := int64(0); i < int64(c.numBlocks); i++ {
					offset := i * overlapDelta
					mint := i*int64(c.numSamplesPerSeriesPerBlock) - offset
					maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
					if len(prefilledLabels) == 0 {
						generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
						for _, s := range generatedSeries {
							prefilledLabels = append(prefilledLabels, s.Labels().Map())
						}
					} else {
						generatedSeries = populateSeries(prefilledLabels, mint, maxt)
					}
					block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
					require.NoError(b, err)
					blocks = append(blocks, block)
					defer block.Close()
				}

				qblocks := make([]storage.Querier, 0, len(blocks))
				for _, blk := range blocks {
					q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
					require.NoError(b, err)
					qblocks = append(qblocks, q)
				}

				sq := storage.NewMergeQuerier(qblocks, nil, storage.ChainedSeriesMerge)
				defer sq.Close()

				mint := blocks[0].meta.MinTime
				maxt := blocks[len(blocks)-1].meta.MaxTime

				b.ResetTimer()
				b.ReportAllocs()

				ss := sq.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*"))
				for ss.Next() {
					it := ss.At().Iterator()
					for t := mint; t <= maxt; t++ {
						it.Seek(t)
					}
					require.NoError(b, it.Err())
				}
				require.NoError(b, ss.Err())
				require.NoError(b, err)
				require.Equal(b, 0, len(ss.Warnings()))
			})
		}
	}
}
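
// Illustrative sketch, not part of the original file: the block layout both
// benchmarks above construct. Each block is shifted back by overlap% of a
// block's worth of samples relative to its predecessor, so with
// numSamplesPerSeriesPerBlock=2000 and overlap=10% the blocks cover
// [0,1999], [1800,3799], [3600,5599], and so on. The helper name is
// hypothetical; the arithmetic mirrors the loops above.
func overlappingBlockRange(i, samplesPerBlock, overlapPercentage int64) (mint, maxt int64) {
	overlapDelta := overlapPercentage * samplesPerBlock / 100
	offset := i * overlapDelta
	mint = i*samplesPerBlock - offset
	maxt = mint + samplesPerBlock - 1
	return mint, maxt
}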
					
						
// Blame note ("Be smarter in how we look at matchers. (#572)"): all matchers
// are inspected to see whether a label can be empty, i!="2" becomes a direct
// lookup rather than an inverse postings list, and Without sets are combined
// before being subtracted, so selections like i=~"x.+",i!="foo",i!="bar"
// become cheaper.
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | // Refer to https://github.com/prometheus/prometheus/issues/2651.
 | 
					
						
							|  |  |  | func BenchmarkSetMatcher(b *testing.B) { | 
					
						
							|  |  |  | 	cases := []struct { | 
					
						
							|  |  |  | 		numBlocks                   int | 
					
						
							|  |  |  | 		numSeries                   int | 
					
						
							|  |  |  | 		numSamplesPerSeriesPerBlock int | 
					
						
							|  |  |  | 		cardinality                 int | 
					
						
							|  |  |  | 		pattern                     string | 
					
						
							|  |  |  | 	}{ | 
					
						
							|  |  |  | 		// The first three cases are to find out whether the set
 | 
					
						
							|  |  |  | 		// matcher is always faster than regex matcher.
 | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   1, | 
					
						
							|  |  |  | 			numSeries:                   1, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 100, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3|4|5|6|7|8|9|10", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   1, | 
					
						
							|  |  |  | 			numSeries:                   15, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 100, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3|4|5|6|7|8|9|10", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   1, | 
					
						
							|  |  |  | 			numSeries:                   15, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 100, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		// Big data sizes benchmarks.
 | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   20, | 
					
						
							|  |  |  | 			numSeries:                   1000, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 100, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   20, | 
					
						
							|  |  |  | 			numSeries:                   1000, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 100, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3|4|5|6|7|8|9|10", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		// Increase cardinality.
 | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   1, | 
					
						
							|  |  |  | 			numSeries:                   100000, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 100000, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3|4|5|6|7|8|9|10", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   1, | 
					
						
							|  |  |  | 			numSeries:                   500000, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 500000, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3|4|5|6|7|8|9|10", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   10, | 
					
						
							|  |  |  | 			numSeries:                   500000, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 500000, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3|4|5|6|7|8|9|10", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							|  |  |  | 			numBlocks:                   1, | 
					
						
							|  |  |  | 			numSeries:                   1000000, | 
					
						
							|  |  |  | 			numSamplesPerSeriesPerBlock: 10, | 
					
						
							|  |  |  | 			cardinality:                 1000000, | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			pattern:                     "1|2|3|4|5|6|7|8|9|10", | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 		}, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
	for _, c := range cases {
		dir, err := ioutil.TempDir("", "bench_postings_for_matchers")
		require.NoError(b, err)
		defer func() {
			require.NoError(b, os.RemoveAll(dir))
		}()

		var (
			blocks          []*Block
			prefilledLabels []map[string]string
			generatedSeries []storage.Series
		)
		for i := int64(0); i < int64(c.numBlocks); i++ {
			mint := i * int64(c.numSamplesPerSeriesPerBlock)
			maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
			if len(prefilledLabels) == 0 {
				generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
				for _, s := range generatedSeries {
					prefilledLabels = append(prefilledLabels, s.Labels().Map())
				}
			} else {
				generatedSeries = populateSeries(prefilledLabels, mint, maxt)
			}
			block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
			require.NoError(b, err)
			blocks = append(blocks, block)
			defer block.Close()
		}

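		// Wrap every block in its own querier and merge them, so the single
		// Select below is evaluated across all blocks at once.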
		qblocks := make([]storage.Querier, 0, len(blocks))
		for _, blk := range blocks {
			q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
			require.NoError(b, err)
			qblocks = append(qblocks, q)
		}

		sq := storage.NewMergeQuerier(qblocks, nil, storage.ChainedSeriesMerge)
		defer sq.Close()

		benchMsg := fmt.Sprintf("nSeries=%d,nBlocks=%d,cardinality=%d,pattern=\"%s\"", c.numSeries, c.numBlocks, c.cardinality, c.pattern)
		b.Run(benchMsg, func(b *testing.B) {
			b.ResetTimer()
			b.ReportAllocs()
			for n := 0; n < b.N; n++ {
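				// The measured work is one Select plus draining the returned
				// series set; no samples are read.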
				ss := sq.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "test", c.pattern))
				for ss.Next() {
				}
				require.NoError(b, ss.Err())
				require.Equal(b, 0, len(ss.Warnings()))
			}
		})
	}
}

// Refer to https://github.com/prometheus/prometheus/issues/2651.
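// findSetMatches is expected to return the literal alternatives of an anchored
// ^(?:a|b|c)$ pattern, and nil whenever the pattern cannot be treated as a plain
// set (unescaped metacharacters, missing anchors).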
func TestFindSetMatches(t *testing.T) {
	cases := []struct {
		pattern string
		exp     []string
	}{
		// Single value, coming from a `bar=~"foo"` selector.
		{
			pattern: "^(?:foo)$",
			exp: []string{
				"foo",
			},
		},
		// Simple sets.
		{
			pattern: "^(?:foo|bar|baz)$",
			exp: []string{
				"foo",
				"bar",
				"baz",
			},
		},
		// Simple sets containing escaped characters.
		{
			pattern: "^(?:fo\\.o|bar\\?|\\^baz)$",
			exp: []string{
				"fo.o",
				"bar?",
				"^baz",
			},
		},
		// Simple sets containing special characters without escaping.
		{
			pattern: "^(?:fo.o|bar?|^baz)$",
			exp:     nil,
		},
		// Missing wrapper.
		{
			pattern: "foo|bar|baz",
			exp:     nil,
		},
	}

	for _, c := range cases {
		matches := findSetMatches(c.pattern)
		if len(c.exp) == 0 {
			if len(matches) != 0 {
				t.Errorf("Evaluating %s, unexpected result %v", c.pattern, matches)
			}
		} else {
			if len(matches) != len(c.exp) {
				t.Errorf("Evaluating %s, length of result not equal to exp", c.pattern)
			} else {
				for i := 0; i < len(c.exp); i++ {
					if c.exp[i] != matches[i] {
						t.Errorf("Evaluating %s, unexpected result %s", c.pattern, matches[i])
					}
				}
			}
		}
	}
}

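// The cases below select against a small Head fixture and cover equality,
// inequality and regexp matchers, including the empty-value edge cases.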
func TestPostingsForMatchers(t *testing.T) {
	chunkDir, err := ioutil.TempDir("", "chunk_dir")
	require.NoError(t, err)
	defer func() {
		require.NoError(t, os.RemoveAll(chunkDir))
	}()
	opts := DefaultHeadOptions()
	opts.ChunkRange = 1000
	opts.ChunkDirRoot = chunkDir
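	// Keep the Head small: a short chunk range and a temp dir for its chunk
	// files are all this test needs.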
	h, err := NewHead(nil, nil, nil, opts, nil)
	require.NoError(t, err)
	defer func() {
		require.NoError(t, h.Close())
	}()

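	// Fixture series: {n="1"}, {n="1",i="a"}, {n="1",i="b"}, {n="2"} and {n="2.5"}.
	// Every case below is an expectation over exactly this set.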
	app := h.Appender(context.Background())
	app.Append(0, labels.FromStrings("n", "1"), 0, 0)
	app.Append(0, labels.FromStrings("n", "1", "i", "a"), 0, 0)
	app.Append(0, labels.FromStrings("n", "1", "i", "b"), 0, 0)
	app.Append(0, labels.FromStrings("n", "2"), 0, 0)
	app.Append(0, labels.FromStrings("n", "2.5"), 0, 0)
	require.NoError(t, app.Commit())

	cases := []struct {
		matchers []*labels.Matcher
		exp      []labels.Labels
	}{
		// Simple equals.
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchEqual, "i", "a")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchEqual, "i", "missing")},
			exp:      []labels.Labels{},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "missing", "")},
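			// An absent label matches the empty string, so missing="" selects every series.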
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
				labels.FromStrings("n", "2"),
				labels.FromStrings("n", "2.5"),
			},
		},
		// Not equals.
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotEqual, "n", "1")},
			exp: []labels.Labels{
				labels.FromStrings("n", "2"),
				labels.FromStrings("n", "2.5"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotEqual, "i", "")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotEqual, "missing", "")},
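			// No series has a non-empty value for an absent label, so nothing is selected.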
			exp:      []labels.Labels{},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotEqual, "i", "a")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotEqual, "i", "")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		// Regex.
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "n", "^1$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^a$")},
							| 
									
										
											  
											
												 Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark                                                            old ns/op     new ns/op     delta
BenchmarkHeadPostingForMatchers/n="1"-4                              5888          6160          +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4                      7190          6640          -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4                      6038          5923          -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4                     6030884       4850525       -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4                            887377940     230329137     -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4                            490316101     319931758     -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4                              594961991     130279313     -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4                              537542388     318751015     -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4              10460243      8565195       -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4       44964267      8561546       -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4                42244885      29137737      -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4              35285834      32774584      -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4             8951047       8379024       -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4       63813335      30672688      -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4     45381112      44924397      -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-04-09 18:59:45 +08:00
										 |  |  | 			exp: []labels.Labels{ | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1", "i", "a"), | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2019-11-19 03:53:33 +08:00
										 |  |  | 			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^a?$")}, | 
					
						
							| 
									
										
											  
											
												 Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark                                                            old ns/op     new ns/op     delta
BenchmarkHeadPostingForMatchers/n="1"-4                              5888          6160          +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4                      7190          6640          -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4                      6038          5923          -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4                     6030884       4850525       -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4                            887377940     230329137     -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4                            490316101     319931758     -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4                              594961991     130279313     -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4                              537542388     318751015     -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4              10460243      8565195       -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4       44964267      8561546       -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4                42244885      29137737      -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4              35285834      32774584      -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4             8951047       8379024       -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4       63813335      30672688      -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4     45381112      44924397      -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-04-09 18:59:45 +08:00
										 |  |  | 			exp: []labels.Labels{ | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1"), | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1", "i", "a"), | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2019-11-19 03:53:33 +08:00
										 |  |  | 			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "i", "^$")}, | 
					
						
							| 
									
										
											  
											
												 Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark                                                            old ns/op     new ns/op     delta
BenchmarkHeadPostingForMatchers/n="1"-4                              5888          6160          +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4                      7190          6640          -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4                      6038          5923          -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4                     6030884       4850525       -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4                            887377940     230329137     -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4                            490316101     319931758     -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4                              594961991     130279313     -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4                              537542388     318751015     -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4              10460243      8565195       -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4       44964267      8561546       -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4                42244885      29137737      -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4              35285834      32774584      -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4             8951047       8379024       -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4       63813335      30672688      -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4     45381112      44924397      -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-04-09 18:59:45 +08:00
										 |  |  | 			exp: []labels.Labels{ | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1"), | 
					
						
							|  |  |  | 				labels.FromStrings("n", "2"), | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 				labels.FromStrings("n", "2.5"), | 
					
						
							| 
									
										
											  
											
												 Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark                                                            old ns/op     new ns/op     delta
BenchmarkHeadPostingForMatchers/n="1"-4                              5888          6160          +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4                      7190          6640          -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4                      6038          5923          -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4                     6030884       4850525       -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4                            887377940     230329137     -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4                            490316101     319931758     -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4                              594961991     130279313     -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4                              537542388     318751015     -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4              10460243      8565195       -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4       44964267      8561546       -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4                42244885      29137737      -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4              35285834      32774584      -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4             8951047       8379024       -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4       63813335      30672688      -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4     45381112      44924397      -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-04-09 18:59:45 +08:00
										 |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2019-11-19 03:53:33 +08:00
										 |  |  | 			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^$")}, | 
					
						
							| 
									
										
											  
											
												 Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark                                                            old ns/op     new ns/op     delta
BenchmarkHeadPostingForMatchers/n="1"-4                              5888          6160          +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4                      7190          6640          -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4                      6038          5923          -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4                     6030884       4850525       -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4                            887377940     230329137     -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4                            490316101     319931758     -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4                              594961991     130279313     -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4                              537542388     318751015     -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4              10460243      8565195       -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4       44964267      8561546       -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4                42244885      29137737      -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4              35285834      32774584      -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4             8951047       8379024       -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4       63813335      30672688      -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4     45381112      44924397      -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-04-09 18:59:45 +08:00
										 |  |  | 			exp: []labels.Labels{ | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1"), | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2019-11-19 03:53:33 +08:00
										 |  |  | 			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^.*$")}, | 
					
						
							| 
									
										
											  
											
												 Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark                                                            old ns/op     new ns/op     delta
BenchmarkHeadPostingForMatchers/n="1"-4                              5888          6160          +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4                      7190          6640          -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4                      6038          5923          -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4                     6030884       4850525       -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4                            887377940     230329137     -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4                            490316101     319931758     -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4                              594961991     130279313     -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4                              537542388     318751015     -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4              10460243      8565195       -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4       44964267      8561546       -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4                42244885      29137737      -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4              35285834      32774584      -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4             8951047       8379024       -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4       63813335      30672688      -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4     45381112      44924397      -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-04-09 18:59:45 +08:00
										 |  |  | 			exp: []labels.Labels{ | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1"), | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1", "i", "a"), | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1", "i", "b"), | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2019-11-19 03:53:33 +08:00
										 |  |  | 			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^.+$")}, | 
					
						
							| 
									
										
											  
											
												 Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark                                                            old ns/op     new ns/op     delta
BenchmarkHeadPostingForMatchers/n="1"-4                              5888          6160          +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4                      7190          6640          -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4                      6038          5923          -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4                     6030884       4850525       -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4                            887377940     230329137     -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4                            490316101     319931758     -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4                              594961991     130279313     -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4                              537542388     318751015     -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4              10460243      8565195       -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4       44964267      8561546       -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4                42244885      29137737      -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4              35285834      32774584      -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4             8951047       8379024       -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4       63813335      30672688      -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4     45381112      44924397      -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-04-09 18:59:45 +08:00
										 |  |  | 			exp: []labels.Labels{ | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1", "i", "a"), | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1", "i", "b"), | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							|  |  |  | 		// Not regex.
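		// A negative regex whose pattern does not match the empty string (e.g.
		// i!~"^a$" or i!~"^.+$") accepts the empty value, so series without an "i"
		// label are selected too; i!~"^.*$" rejects every value including the empty
		// one, hence the empty expectation for that case.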
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotRegexp, "n", "^1$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "2"),
				labels.FromStrings("n", "2.5"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^a$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^a?$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^.*$")},
			exp:      []labels.Labels{},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^.+$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
			},
		},
		// Combinations.
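		// Multiple matchers on the same label are intersected: n="1", i!="" and
		// i="a" together select only {n="1", i="a"}.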
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotEqual, "i", ""), labels.MustNewMatcher(labels.MatchEqual, "i", "a")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotEqual, "i", "b"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^(b|a).*$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
			},
		},
		// Set optimization for Regex.
		// Refer to https://github.com/prometheus/prometheus/issues/2651.
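		// The set optimization turns a regex that is a plain alternation of literal
		// values (such as "1|2") into direct postings lookups for each value,
		// rather than matching the regex against every value of the label.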
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "n", "1|2")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
				labels.FromStrings("n", "2"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "i", "a|b")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "n", "x1|2")},
			exp: []labels.Labels{
				labels.FromStrings("n", "2"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "n", "2|2\\.5")},
			exp: []labels.Labels{
				labels.FromStrings("n", "2"),
				labels.FromStrings("n", "2.5"),
			},
		},
		// Empty value.
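		// An alternation with an empty branch ("c||d") matches the empty value, so
		// it selects the series that carry no "i" label at all.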
 | 
					
						
							|  |  |  | 		{ | 
					
						
							| 
									
										
										
										
											2020-02-05 18:53:12 +08:00
										 |  |  | 			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "i", "c||d")}, | 
					
						
							| 
									
										
										
										
											2019-05-27 19:24:46 +08:00
										 |  |  | 			exp: []labels.Labels{ | 
					
						
							|  |  |  | 				labels.FromStrings("n", "1"), | 
					
						
							|  |  |  | 				labels.FromStrings("n", "2"), | 
					
						
							|  |  |  | 				labels.FromStrings("n", "2.5"), | 
					
						
							|  |  |  | 			}, | 
					
						
							|  |  |  | 		}, | 
					
						
							| 
									
										
											  
											
												 Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark                                                            old ns/op     new ns/op     delta
BenchmarkHeadPostingForMatchers/n="1"-4                              5888          6160          +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4                      7190          6640          -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4                      6038          5923          -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4                     6030884       4850525       -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4                            887377940     230329137     -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4                            490316101     319931758     -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4                              594961991     130279313     -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4                              537542388     318751015     -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4              10460243      8565195       -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4       44964267      8561546       -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4                42244885      29137737      -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4              35285834      32774584      -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4             8951047       8379024       -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4       63813335      30672688      -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4     45381112      44924397      -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
											
										 
											2019-04-09 18:59:45 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-03-26 03:13:47 +08:00
										 |  |  | 	ir, err := h.Index() | 
					
						
							| 
									
										
										
										
											2020-10-29 17:43:23 +08:00
										 |  |  | 	require.NoError(t, err) | 
					
						
							| 
									
										
											  
											
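	// Evaluate each matcher set against the index and verify that exactly the
	// expected label sets come back: unexpected or missing series fail the test.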
	for _, c := range cases {
		exp := map[string]struct{}{}
		for _, l := range c.exp {
			exp[l.String()] = struct{}{}
		}
		p, err := PostingsForMatchers(ir, c.matchers...)
		require.NoError(t, err)

		for p.Next() {
			lbls := labels.Labels{}
			require.NoError(t, ir.Series(p.At(), &lbls, &[]chunks.Meta{}))
			if _, ok := exp[lbls.String()]; !ok {
				t.Errorf("Evaluating %v, unexpected result %s", c.matchers, lbls.String())
			} else {
				delete(exp, lbls.String())
			}
		}
		require.NoError(t, p.Err())
		if len(exp) != 0 {
			t.Errorf("Evaluating %v, missing results %+v", c.matchers, exp)
		}
	}
}

// TestClose ensures that calling Close more than once doesn't block and doesn't panic.
func TestClose(t *testing.T) {
	dir, err := ioutil.TempDir("", "test_storage")
	if err != nil {
		t.Fatalf("Opening test dir failed: %s", err)
	}
	defer func() {
		require.NoError(t, os.RemoveAll(dir))
	}()

	createBlock(t, dir, genSeries(1, 1, 0, 10))
	createBlock(t, dir, genSeries(1, 1, 10, 20))

	db, err := Open(dir, nil, nil, DefaultOptions(), nil)
	if err != nil {
		t.Fatalf("Opening test storage failed: %s", err)
	}
	defer func() {
		require.NoError(t, db.Close())
	}()

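	// Close the querier twice: the first Close must succeed and the second must
	// return an error rather than block or panic.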
	q, err := db.Querier(context.TODO(), 0, 20)
	require.NoError(t, err)
	require.NoError(t, q.Close())
	require.Error(t, q.Close())
}

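// BenchmarkQueries benchmarks Select over 1-, 3- and 10-block merge queriers and a
// head querier, for equality and regex matchers. The trailing digit of each case
// name is the number of series the matchers are expected to expand, which
// benchQuery asserts.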
func BenchmarkQueries(b *testing.B) {
	cases := map[string]labels.Selector{
		"Eq Matcher: Expansion - 1": {
			labels.MustNewMatcher(labels.MatchEqual, "la", "va"),
		},
		"Eq Matcher: Expansion - 2": {
			labels.MustNewMatcher(labels.MatchEqual, "la", "va"),
			labels.MustNewMatcher(labels.MatchEqual, "lb", "vb"),
		},

		"Eq Matcher: Expansion - 3": {
			labels.MustNewMatcher(labels.MatchEqual, "la", "va"),
			labels.MustNewMatcher(labels.MatchEqual, "lb", "vb"),
			labels.MustNewMatcher(labels.MatchEqual, "lc", "vc"),
		},
		"Regex Matcher: Expansion - 1": {
			labels.MustNewMatcher(labels.MatchRegexp, "la", ".*va"),
		},
		"Regex Matcher: Expansion - 2": {
			labels.MustNewMatcher(labels.MatchRegexp, "la", ".*va"),
			labels.MustNewMatcher(labels.MatchRegexp, "lb", ".*vb"),
		},
		"Regex Matcher: Expansion - 3": {
			labels.MustNewMatcher(labels.MatchRegexp, "la", ".*va"),
			labels.MustNewMatcher(labels.MatchRegexp, "lb", ".*vb"),
			labels.MustNewMatcher(labels.MatchRegexp, "lc", ".*vc"),
		},
	}

	queryTypes := make(map[string]storage.Querier)
	defer func() {
		for _, q := range queryTypes {
			// Can't check the error here: the queryTypes entries share the same
			// underlying block queriers, so some of them will already have been
			// closed in an earlier iteration.
			q.Close()
		}
	}()

	for title, selectors := range cases {
		for _, nSeries := range []int{10} {
			for _, nSamples := range []int64{1000, 10000, 100000} {
				dir, err := ioutil.TempDir("", "test_persisted_query")
				require.NoError(b, err)
				defer func() {
					require.NoError(b, os.RemoveAll(dir))
				}()

				series := genSeries(nSeries, 5, 1, nSamples)

				// Add some common labels to make the matchers select these series.
				{
					var commonLbls labels.Labels
					for _, selector := range selectors {
						switch selector.Type {
						case labels.MatchEqual:
							commonLbls = append(commonLbls, labels.Label{Name: selector.Name, Value: selector.Value})
						case labels.MatchRegexp:
							commonLbls = append(commonLbls, labels.Label{Name: selector.Name, Value: selector.Value})
						}
					}
					for i := range commonLbls {
						s := series[i].(*storage.SeriesEntry)
						allLabels := append(commonLbls, s.Labels()...)
						newS := storage.NewListSeries(allLabels, nil)
						newS.SampleIteratorFn = s.SampleIteratorFn

						series[i] = newS
					}
				}

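				// Persist the same series into 11 identical blocks so that 1-, 3- and
				// 10-block merge queriers can be built from them below.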
				qs := make([]storage.Querier, 0, 10)
				for x := 0; x <= 10; x++ {
					block, err := OpenBlock(nil, createBlock(b, dir, series), nil)
					require.NoError(b, err)
					q, err := NewBlockQuerier(block, 1, int64(nSamples))
					require.NoError(b, err)
					qs = append(qs, q)
				}

				queryTypes["_1-Block"] = storage.NewMergeQuerier(qs[:1], nil, storage.ChainedSeriesMerge)
				queryTypes["_3-Blocks"] = storage.NewMergeQuerier(qs[0:3], nil, storage.ChainedSeriesMerge)
				queryTypes["_10-Blocks"] = storage.NewMergeQuerier(qs, nil, storage.ChainedSeriesMerge)

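				// Also benchmark an in-memory head holding the same series, queried
				// over the full sample range.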
				chunkDir, err := ioutil.TempDir("", "chunk_dir")
				require.NoError(b, err)
				defer func() {
					require.NoError(b, os.RemoveAll(chunkDir))
				}()
				head := createHead(b, nil, series, chunkDir)
				qHead, err := NewBlockQuerier(head, 1, nSamples)
				require.NoError(b, err)
				queryTypes["_Head"] = qHead

				for qtype, querier := range queryTypes {
					b.Run(title+qtype+"_nSeries:"+strconv.Itoa(nSeries)+"_nSamples:"+strconv.Itoa(int(nSamples)), func(b *testing.B) {
						expExpansions, err := strconv.Atoi(string(title[len(title)-1]))
						require.NoError(b, err)
						benchQuery(b, expExpansions, querier, selectors)
					})
				}
				require.NoError(b, head.Close())
			}
		}
	}
}

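// benchQuery runs Select with the given matchers b.N times, draining every returned
// series and all of its samples, and checks that the expected number of series was
// expanded.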
func benchQuery(b *testing.B, expExpansions int, q storage.Querier, selectors labels.Selector) {
	b.ResetTimer()
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
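		// Select without sorting and walk every series and sample, so the full
		// cost of expanding the result is measured.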
		ss := q.Select(false, nil, selectors...)
		var actualExpansions int
		for ss.Next() {
			s := ss.At()
			s.Labels()
			it := s.Iterator()
			for it.Next() {
			}
			actualExpansions++
		}
		require.NoError(b, ss.Err())
		require.Equal(b, 0, len(ss.Warnings()))
		require.Equal(b, expExpansions, actualExpansions)
		require.NoError(b, ss.Err())
	}
}

// mockMatcherIndex is used to check if the regex matcher works as expected.
type mockMatcherIndex struct{}

func (m mockMatcherIndex) Symbols() index.StringIter { return nil }

func (m mockMatcherIndex) Close() error { return nil }

// SortedLabelValues will return an error if it is called.
func (m mockMatcherIndex) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {
	return []string{}, errors.New("sorted label values called")
}

// LabelValues will return an error if it is called.
func (m mockMatcherIndex) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {
	return []string{}, errors.New("label values called")
}

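// LabelValueFor is not expected to be reached by these tests; it returns an error
// to make any call visible.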
func (m mockMatcherIndex) LabelValueFor(id storage.SeriesRef, label string) (string, error) {
	return "", errors.New("label value for called")
}

func (m mockMatcherIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) {
	return nil, errors.New("label names for called")
}

func (m mockMatcherIndex) Postings(name string, values ...string) (index.Postings, error) {
	return index.EmptyPostings(), nil
}

func (m mockMatcherIndex) SortedPostings(p index.Postings) index.Postings {
	return index.EmptyPostings()
}

func (m mockMatcherIndex) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
	return nil
}

func (m mockMatcherIndex) LabelNames(...*labels.Matcher) ([]string, error) {
	return []string{}, nil
}

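// TestPostingsForMatcher checks which matcher shapes fall back to label value
// iteration on the index: mockMatcherIndex errors on its label value methods, so
// the cases marked hasError are the ones expected to take that path.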
func TestPostingsForMatcher(t *testing.T) {
	cases := []struct {
		matcher  *labels.Matcher
		hasError bool
	}{
		{
			// An equal matcher does a direct postings lookup and never touches label values.
			matcher:  labels.MustNewMatcher(labels.MatchEqual, "test", "test"),
			hasError: false,
		},
		{
			// A regex matcher without '|' will call LabelValues().
			matcher:  labels.MustNewMatcher(labels.MatchRegexp, "test", ".*"),
			hasError: true,
		},
		{
			matcher:  labels.MustNewMatcher(labels.MatchRegexp, "test", "a|b"),
			hasError: false,
		},
		{
			// An anchored regex, even with '|', still falls back to label value iteration.
			matcher:  labels.MustNewMatcher(labels.MatchRegexp, "test", "^(?:a|b)$"),
			hasError: true,
		},
	}

	for _, tc := range cases {
		ir := &mockMatcherIndex{}
		_, err := postingsForMatcher(ir, tc.matcher)
		if tc.hasError {
			require.Error(t, err)
		} else {
			require.NoError(t, err)
		}
	}
}

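// TestBlockBaseSeriesSet checks that blockBaseSeriesSet walks the given postings in
// order, skips references that are not in the index, and yields the labels and
// chunk metas of the matching series.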
func TestBlockBaseSeriesSet(t *testing.T) {
	type refdSeries struct {
		lset   labels.Labels
		chunks []chunks.Meta

		ref storage.SeriesRef
	}

	cases := []struct {
		series []refdSeries
		// Postings should be in the sorted order of the series
		postings []storage.SeriesRef

		expIdxs []int
	}{
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{Name: "a", Value: "a"}}...),
					chunks: []chunks.Meta{
						{Ref: 29},
						{Ref: 45},
						{Ref: 245},
						{Ref: 123},
						{Ref: 4232},
						{Ref: 5344},
						{Ref: 121},
					},
					ref: 12,
				},
				{
					lset: labels.New([]labels.Label{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset:   labels.New([]labels.Label{{Name: "b", Value: "c"}}...),
					chunks: []chunks.Meta{{Ref: 8282}},
					ref:    1,
				},
				{
					lset: labels.New([]labels.Label{{Name: "b", Value: "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 829}, {Ref: 239}, {Ref: 2349}, {Ref: 659}, {Ref: 269},
					},
					ref: 108,
				},
			},
			postings: []storage.SeriesRef{12, 13, 10, 108}, // 13 doesn't exist and should just be skipped over.
			expIdxs:  []int{0, 1, 3},
		},
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset:   labels.New([]labels.Label{{Name: "b", Value: "c"}}...),
					chunks: []chunks.Meta{{Ref: 8282}},
					ref:    3,
				},
			},
			postings: []storage.SeriesRef{},
			expIdxs:  []int{},
		},
	}

	for _, tc := range cases {
		mi := newMockIndex()
		for _, s := range tc.series {
			require.NoError(t, mi.AddSeries(s.ref, s.lset, s.chunks...))
		}

		bcs := &blockBaseSeriesSet{
			p:          index.NewListPostings(tc.postings),
			index:      mi,
			tombstones: tombstones.NewMemTombstones(),
		}

		i := 0
		for bcs.Next() {
			chks := bcs.currIterFn().chks
			idx := tc.expIdxs[i]

			require.Equal(t, tc.series[idx].lset, bcs.currLabels)
			require.Equal(t, tc.series[idx].chunks, chks)

			i++
		}
		require.Equal(t, len(tc.expIdxs), i)
		require.NoError(t, bcs.Err())
	}
}