Merge pull request #17840 from prometheus/krajo/st-in-chunks

feat(tsdb): new Append parameter and AtST interface for chunks
George Krajcsovits 2026-01-16 10:32:53 +01:00 committed by GitHub
commit ef350b2b54
35 changed files with 1011 additions and 803 deletions
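
For orientation, here is a minimal sketch of the API shape this commit applies across the appenders and iterators below. The interface names and method sets are inferred from the call sites in the hunks (the full chunkenc definitions are not part of this diff), so treat it as an assumption-laden illustration rather than the actual interface declarations.

// Sketch only: shapes inferred from the call sites in this diff.
package sketch

// iterator is a trimmed stand-in for chunkenc.Iterator.
type iterator interface {
	AtT() int64  // timestamp of the current sample
	AtST() int64 // new: start timestamp of the current sample, 0 when unknown
}

// appender is a trimmed stand-in for chunkenc.Appender.
type appender interface {
	// Previously Append(t int64, v float64); the start timestamp st is the
	// new leading parameter. AppendHistogram gains the same st parameter
	// before the sample timestamp, as the test updates below show.
	Append(st, t int64, v float64)
}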

View file

@ -3747,12 +3747,12 @@ func TestHistogramRateWithFloatStaleness(t *testing.T) {
recoded bool
)
newc, recoded, app, err = app.AppendHistogram(nil, 0, h1.Copy(), false)
newc, recoded, app, err = app.AppendHistogram(nil, 0, 0, h1.Copy(), false)
require.NoError(t, err)
require.False(t, recoded)
require.Nil(t, newc)
newc, recoded, _, err = app.AppendHistogram(nil, 10, h1.Copy(), false)
newc, recoded, _, err = app.AppendHistogram(nil, 0, 10, h1.Copy(), false)
require.NoError(t, err)
require.False(t, recoded)
require.Nil(t, newc)
@ -3762,7 +3762,7 @@ func TestHistogramRateWithFloatStaleness(t *testing.T) {
app, err = c2.Appender()
require.NoError(t, err)
app.Append(20, math.Float64frombits(value.StaleNaN))
app.Append(0, 20, math.Float64frombits(value.StaleNaN))
// Make a chunk with two normal histograms that have zero value.
h2 := histogram.Histogram{
@ -3773,12 +3773,12 @@ func TestHistogramRateWithFloatStaleness(t *testing.T) {
app, err = c3.Appender()
require.NoError(t, err)
newc, recoded, app, err = app.AppendHistogram(nil, 30, h2.Copy(), false)
newc, recoded, app, err = app.AppendHistogram(nil, 0, 30, h2.Copy(), false)
require.NoError(t, err)
require.False(t, recoded)
require.Nil(t, newc)
newc, recoded, _, err = app.AppendHistogram(nil, 40, h2.Copy(), false)
newc, recoded, _, err = app.AppendHistogram(nil, 0, 40, h2.Copy(), false)
require.NoError(t, err)
require.False(t, recoded)
require.Nil(t, newc)

View file

@ -235,4 +235,6 @@ func (h *histogramIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64,
func (*histogramIterator) AtT() int64 { return 0 }
func (*histogramIterator) AtST() int64 { return 0 }
func (*histogramIterator) Err() error { return nil }

View file

@ -487,6 +487,11 @@ func (ssi *storageSeriesIterator) AtT() int64 {
return ssi.currT
}
// TODO(krajorama): implement AtST.
func (*storageSeriesIterator) AtST() int64 {
return 0
}
func (ssi *storageSeriesIterator) Next() chunkenc.ValueType {
if ssi.currH != nil {
ssi.iHistograms++

View file

@ -697,12 +697,14 @@ func TestQueryForStateSeries(t *testing.T) {
{
selectMockFunction: func(bool, *storage.SelectHints, ...*labels.Matcher) storage.SeriesSet {
return storage.TestSeriesSet(storage.MockSeries(
nil,
[]int64{1, 2, 3},
[]float64{1, 2, 3},
[]string{"__name__", "ALERTS_FOR_STATE", "alertname", "TestRule", "severity", "critical"},
))
},
expectedSeries: storage.MockSeries(
nil,
[]int64{1, 2, 3},
[]float64{1, 2, 3},
[]string{"__name__", "ALERTS_FOR_STATE", "alertname", "TestRule", "severity", "critical"},

View file

@ -119,13 +119,16 @@ func (b *BufferedSeriesIterator) Next() chunkenc.ValueType {
return chunkenc.ValNone
case chunkenc.ValFloat:
t, f := b.it.At()
b.buf.addF(fSample{t: t, f: f})
st := b.it.AtST()
b.buf.addF(fSample{st: st, t: t, f: f})
case chunkenc.ValHistogram:
t, h := b.it.AtHistogram(&b.hReader)
b.buf.addH(hSample{t: t, h: h})
st := b.it.AtST()
b.buf.addH(hSample{st: st, t: t, h: h})
case chunkenc.ValFloatHistogram:
t, fh := b.it.AtFloatHistogram(&b.fhReader)
b.buf.addFH(fhSample{t: t, fh: fh})
st := b.it.AtST()
b.buf.addFH(fhSample{st: st, t: t, fh: fh})
default:
panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType))
}
@ -157,20 +160,29 @@ func (b *BufferedSeriesIterator) AtT() int64 {
return b.it.AtT()
}
// AtST returns the start timestamp of the iterator's current sample.
func (b *BufferedSeriesIterator) AtST() int64 {
return b.it.AtST()
}
// Err returns the last encountered error.
func (b *BufferedSeriesIterator) Err() error {
return b.it.Err()
}
type fSample struct {
t int64
f float64
st, t int64
f float64
}
func (s fSample) T() int64 {
return s.t
}
func (s fSample) ST() int64 {
return s.st
}
func (s fSample) F() float64 {
return s.f
}
@ -192,14 +204,18 @@ func (s fSample) Copy() chunks.Sample {
}
type hSample struct {
t int64
h *histogram.Histogram
st, t int64
h *histogram.Histogram
}
func (s hSample) T() int64 {
return s.t
}
func (s hSample) ST() int64 {
return s.st
}
func (hSample) F() float64 {
panic("F() called for hSample")
}
@ -217,18 +233,22 @@ func (hSample) Type() chunkenc.ValueType {
}
func (s hSample) Copy() chunks.Sample {
return hSample{t: s.t, h: s.h.Copy()}
return hSample{st: s.st, t: s.t, h: s.h.Copy()}
}
type fhSample struct {
t int64
fh *histogram.FloatHistogram
st, t int64
fh *histogram.FloatHistogram
}
func (s fhSample) T() int64 {
return s.t
}
func (s fhSample) ST() int64 {
return s.st
}
func (fhSample) F() float64 {
panic("F() called for fhSample")
}
@ -246,7 +266,7 @@ func (fhSample) Type() chunkenc.ValueType {
}
func (s fhSample) Copy() chunks.Sample {
return fhSample{t: s.t, fh: s.fh.Copy()}
return fhSample{st: s.st, t: s.t, fh: s.fh.Copy()}
}
type sampleRing struct {
@ -329,6 +349,7 @@ func (r *sampleRing) iterator() *SampleRingIterator {
type SampleRingIterator struct {
r *sampleRing
i int
st int64
t int64
f float64
h *histogram.Histogram
@ -350,21 +371,25 @@ func (it *SampleRingIterator) Next() chunkenc.ValueType {
switch it.r.bufInUse {
case fBuf:
s := it.r.atF(it.i)
it.st = s.st
it.t = s.t
it.f = s.f
return chunkenc.ValFloat
case hBuf:
s := it.r.atH(it.i)
it.st = s.st
it.t = s.t
it.h = s.h
return chunkenc.ValHistogram
case fhBuf:
s := it.r.atFH(it.i)
it.st = s.st
it.t = s.t
it.fh = s.fh
return chunkenc.ValFloatHistogram
}
s := it.r.at(it.i)
it.st = s.ST()
it.t = s.T()
switch s.Type() {
case chunkenc.ValHistogram:
@ -410,6 +435,10 @@ func (it *SampleRingIterator) AtT() int64 {
return it.t
}
func (it *SampleRingIterator) AtST() int64 {
return it.st
}
func (r *sampleRing) at(i int) chunks.Sample {
j := (r.f + i) % len(r.iBuf)
return r.iBuf[j]
@ -651,6 +680,7 @@ func addH(s hSample, buf []hSample, r *sampleRing) []hSample {
}
buf[r.i].t = s.t
buf[r.i].st = s.st
if buf[r.i].h == nil {
buf[r.i].h = s.h.Copy()
} else {
@ -695,6 +725,7 @@ func addFH(s fhSample, buf []fhSample, r *sampleRing) []fhSample {
}
buf[r.i].t = s.t
buf[r.i].st = s.st
if buf[r.i].fh == nil {
buf[r.i].fh = s.fh.Copy()
} else {

View file

@ -61,10 +61,9 @@ func TestSampleRing(t *testing.T) {
input := []fSample{}
for _, t := range c.input {
input = append(input, fSample{
t: t,
f: float64(rand.Intn(100)),
})
// Randomize start timestamp to make sure it does not affect the
// outcome.
input = append(input, fSample{st: rand.Int63(), t: t, f: float64(rand.Intn(100))})
}
for i, s := range input {
@ -90,6 +89,24 @@ func TestSampleRing(t *testing.T) {
}
}
func TestSampleRingFloatST(t *testing.T) {
r := newSampleRing(10, 5, chunkenc.ValNone)
require.Empty(t, r.fBuf)
require.Empty(t, r.hBuf)
require.Empty(t, r.fhBuf)
require.Empty(t, r.iBuf)
r.addF(fSample{st: 100, t: 11, f: 3.14})
it := r.iterator()
require.Equal(t, chunkenc.ValFloat, it.Next())
ts, f := it.At()
require.Equal(t, int64(11), ts)
require.Equal(t, 3.14, f)
require.Equal(t, int64(100), it.AtST())
require.Equal(t, chunkenc.ValNone, it.Next())
}
func TestSampleRingMixed(t *testing.T) {
h1 := tsdbutil.GenerateTestHistogram(1)
h2 := tsdbutil.GenerateTestHistogram(2)
@ -102,39 +119,43 @@ func TestSampleRingMixed(t *testing.T) {
require.Empty(t, r.iBuf)
// But then mixed adds should work as expected.
r.addF(fSample{t: 1, f: 3.14})
r.addH(hSample{t: 2, h: h1})
r.addF(fSample{st: 10, t: 11, f: 3.14})
r.addH(hSample{st: 20, t: 21, h: h1})
it := r.iterator()
require.Equal(t, chunkenc.ValFloat, it.Next())
ts, f := it.At()
require.Equal(t, int64(1), ts)
require.Equal(t, int64(11), ts)
require.Equal(t, 3.14, f)
require.Equal(t, int64(10), it.AtST())
require.Equal(t, chunkenc.ValHistogram, it.Next())
var h *histogram.Histogram
ts, h = it.AtHistogram()
require.Equal(t, int64(2), ts)
require.Equal(t, int64(21), ts)
require.Equal(t, h1, h)
require.Equal(t, int64(20), it.AtST())
require.Equal(t, chunkenc.ValNone, it.Next())
r.reset()
it = r.iterator()
require.Equal(t, chunkenc.ValNone, it.Next())
r.addF(fSample{t: 3, f: 4.2})
r.addH(hSample{t: 4, h: h2})
r.addF(fSample{st: 30, t: 31, f: 4.2})
r.addH(hSample{st: 40, t: 41, h: h2})
it = r.iterator()
require.Equal(t, chunkenc.ValFloat, it.Next())
ts, f = it.At()
require.Equal(t, int64(3), ts)
require.Equal(t, int64(31), ts)
require.Equal(t, 4.2, f)
require.Equal(t, int64(30), it.AtST())
require.Equal(t, chunkenc.ValHistogram, it.Next())
ts, h = it.AtHistogram()
require.Equal(t, int64(4), ts)
require.Equal(t, int64(41), ts)
require.Equal(t, h2, h)
require.Equal(t, int64(40), it.AtST())
require.Equal(t, chunkenc.ValNone, it.Next())
}
@ -160,44 +181,50 @@ func TestSampleRingAtFloatHistogram(t *testing.T) {
it := r.iterator()
require.Equal(t, chunkenc.ValNone, it.Next())
r.addFH(fhSample{t: 1, fh: fh1})
r.addFH(fhSample{t: 2, fh: fh2})
r.addFH(fhSample{st: 10, t: 11, fh: fh1})
r.addFH(fhSample{st: 20, t: 21, fh: fh2})
it = r.iterator()
require.Equal(t, chunkenc.ValFloatHistogram, it.Next())
ts, fh = it.AtFloatHistogram(fh)
require.Equal(t, int64(1), ts)
require.Equal(t, int64(11), ts)
require.Equal(t, fh1, fh)
require.Equal(t, int64(10), it.AtST())
require.Equal(t, chunkenc.ValFloatHistogram, it.Next())
ts, fh = it.AtFloatHistogram(fh)
require.Equal(t, int64(2), ts)
require.Equal(t, int64(21), ts)
require.Equal(t, fh2, fh)
require.Equal(t, int64(20), it.AtST())
require.Equal(t, chunkenc.ValNone, it.Next())
r.reset()
it = r.iterator()
require.Equal(t, chunkenc.ValNone, it.Next())
r.addH(hSample{t: 3, h: h1})
r.addH(hSample{t: 4, h: h2})
r.addH(hSample{st: 30, t: 31, h: h1})
r.addH(hSample{st: 40, t: 41, h: h2})
it = r.iterator()
require.Equal(t, chunkenc.ValHistogram, it.Next())
ts, h = it.AtHistogram()
require.Equal(t, int64(3), ts)
require.Equal(t, int64(31), ts)
require.Equal(t, h1, h)
require.Equal(t, int64(30), it.AtST())
ts, fh = it.AtFloatHistogram(fh)
require.Equal(t, int64(3), ts)
require.Equal(t, int64(31), ts)
require.Equal(t, h1.ToFloat(nil), fh)
require.Equal(t, int64(30), it.AtST())
require.Equal(t, chunkenc.ValHistogram, it.Next())
ts, h = it.AtHistogram()
require.Equal(t, int64(4), ts)
require.Equal(t, int64(41), ts)
require.Equal(t, h2, h)
require.Equal(t, int64(40), it.AtST())
ts, fh = it.AtFloatHistogram(fh)
require.Equal(t, int64(4), ts)
require.Equal(t, int64(41), ts)
require.Equal(t, h2.ToFloat(nil), fh)
require.Equal(t, int64(40), it.AtST())
require.Equal(t, chunkenc.ValNone, it.Next())
}
@ -209,59 +236,63 @@ func TestBufferedSeriesIterator(t *testing.T) {
bit := it.Buffer()
for bit.Next() == chunkenc.ValFloat {
t, f := bit.At()
b = append(b, fSample{t: t, f: f})
st := bit.AtST()
b = append(b, fSample{st: st, t: t, f: f})
}
require.Equal(t, exp, b, "buffer mismatch")
}
sampleEq := func(ets int64, ev float64) {
sampleEq := func(est, ets int64, ev float64) {
ts, v := it.At()
st := it.AtST()
require.Equal(t, est, st, "start timestamp mismatch")
require.Equal(t, ets, ts, "timestamp mismatch")
require.Equal(t, ev, v, "value mismatch")
}
prevSampleEq := func(ets int64, ev float64, eok bool) {
prevSampleEq := func(est, ets int64, ev float64, eok bool) {
s, ok := it.PeekBack(1)
require.Equal(t, eok, ok, "exist mismatch")
require.Equal(t, est, s.ST(), "start timestamp mismatch")
require.Equal(t, ets, s.T(), "timestamp mismatch")
require.Equal(t, ev, s.F(), "value mismatch")
}
it = NewBufferIterator(NewListSeriesIterator(samples{
fSample{t: 1, f: 2},
fSample{t: 2, f: 3},
fSample{t: 3, f: 4},
fSample{t: 4, f: 5},
fSample{t: 5, f: 6},
fSample{t: 99, f: 8},
fSample{t: 100, f: 9},
fSample{t: 101, f: 10},
fSample{st: -1, t: 1, f: 2},
fSample{st: 1, t: 2, f: 3},
fSample{st: 2, t: 3, f: 4},
fSample{st: 3, t: 4, f: 5},
fSample{st: 3, t: 5, f: 6},
fSample{st: 50, t: 99, f: 8},
fSample{st: 99, t: 100, f: 9},
fSample{st: 100, t: 101, f: 10},
}), 2)
require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed")
sampleEq(1, 2)
prevSampleEq(0, 0, false)
sampleEq(-1, 1, 2)
prevSampleEq(0, 0, 0, false)
bufferEq(nil)
require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
sampleEq(2, 3)
prevSampleEq(1, 2, true)
bufferEq([]fSample{{t: 1, f: 2}})
sampleEq(1, 2, 3)
prevSampleEq(-1, 1, 2, true)
bufferEq([]fSample{{st: -1, t: 1, f: 2}})
require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
sampleEq(5, 6)
prevSampleEq(4, 5, true)
bufferEq([]fSample{{t: 2, f: 3}, {t: 3, f: 4}, {t: 4, f: 5}})
sampleEq(3, 5, 6)
prevSampleEq(3, 4, 5, true)
bufferEq([]fSample{{st: 1, t: 2, f: 3}, {st: 2, t: 3, f: 4}, {st: 3, t: 4, f: 5}})
require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed")
sampleEq(5, 6)
prevSampleEq(4, 5, true)
bufferEq([]fSample{{t: 2, f: 3}, {t: 3, f: 4}, {t: 4, f: 5}})
sampleEq(3, 5, 6)
prevSampleEq(3, 4, 5, true)
bufferEq([]fSample{{st: 1, t: 2, f: 3}, {st: 2, t: 3, f: 4}, {st: 3, t: 4, f: 5}})
require.Equal(t, chunkenc.ValFloat, it.Seek(101), "seek failed")
sampleEq(101, 10)
prevSampleEq(100, 9, true)
bufferEq([]fSample{{t: 99, f: 8}, {t: 100, f: 9}})
sampleEq(100, 101, 10)
prevSampleEq(99, 100, 9, true)
bufferEq([]fSample{{st: 50, t: 99, f: 8}, {st: 99, t: 100, f: 9}})
require.Equal(t, chunkenc.ValNone, it.Next(), "next succeeded unexpectedly")
require.Equal(t, chunkenc.ValNone, it.Seek(1024), "seek succeeded unexpectedly")
@ -402,6 +433,10 @@ func (*mockSeriesIterator) AtT() int64 {
return 0 // Not really mocked.
}
func (*mockSeriesIterator) AtST() int64 {
return 0 // Not really mocked.
}
type fakeSeriesIterator struct {
nsamples int64
step int64
@ -428,6 +463,10 @@ func (it *fakeSeriesIterator) AtT() int64 {
return it.idx * it.step
}
func (*fakeSeriesIterator) AtST() int64 {
return 0 // No start timestamps in this fake iterator.
}
func (it *fakeSeriesIterator) Next() chunkenc.ValueType {
it.idx++
if it.idx >= it.nsamples {

View file

@ -473,9 +473,10 @@ type Series interface {
}
type mockSeries struct {
timestamps []int64
values []float64
labelSet []string
startTimestamps []int64
timestamps []int64
values []float64
labelSet []string
}
func (s mockSeries) Labels() labels.Labels {
@ -483,15 +484,19 @@ func (s mockSeries) Labels() labels.Labels {
}
func (s mockSeries) Iterator(chunkenc.Iterator) chunkenc.Iterator {
return chunkenc.MockSeriesIterator(s.timestamps, s.values)
return chunkenc.MockSeriesIterator(s.startTimestamps, s.timestamps, s.values)
}
// MockSeries returns a series with custom timestamps, values and labelSet.
func MockSeries(timestamps []int64, values []float64, labelSet []string) Series {
// MockSeries returns a series with custom start timestamp, timestamps, values,
// and labelSet.
// Start timestamps are optional; pass nil or an empty slice to indicate no start
// timestamps.
func MockSeries(startTimestamps, timestamps []int64, values []float64, labelSet []string) Series {
return mockSeries{
timestamps: timestamps,
values: values,
labelSet: labelSet,
startTimestamps: startTimestamps,
timestamps: timestamps,
values: values,
labelSet: labelSet,
}
}
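
To make the optional start timestamps concrete, a hedged usage sketch of the updated constructor; buildMockSeries is a hypothetical helper, not part of this change, and the label pairs are placeholders.

package sketch

import "github.com/prometheus/prometheus/storage"

// buildMockSeries shows both forms of the new MockSeries signature.
func buildMockSeries() (storage.Series, storage.Series) {
	// Without start timestamps: pass nil, and the iterator reports AtST() == 0.
	plain := storage.MockSeries(
		nil,
		[]int64{1, 2, 3},
		[]float64{1, 2, 3},
		[]string{"__name__", "foo"},
	)
	// With per-sample start timestamps, surfaced through Iterator().AtST().
	withST := storage.MockSeries(
		[]int64{0, 1, 2},
		[]int64{1, 2, 3},
		[]float64{1, 2, 3},
		[]string{"__name__", "foo"},
	)
	return plain, withST
}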

View file

@ -23,7 +23,7 @@ import (
)
func TestMockSeries(t *testing.T) {
s := storage.MockSeries([]int64{1, 2, 3}, []float64{1, 2, 3}, []string{"__name__", "foo"})
s := storage.MockSeries(nil, []int64{1, 2, 3}, []float64{1, 2, 3}, []string{"__name__", "foo"})
it := s.Iterator(nil)
ts := []int64{}
vs := []float64{}
@ -35,3 +35,20 @@ func TestMockSeries(t *testing.T) {
require.Equal(t, []int64{1, 2, 3}, ts)
require.Equal(t, []float64{1, 2, 3}, vs)
}
func TestMockSeriesWithST(t *testing.T) {
s := storage.MockSeries([]int64{0, 1, 2}, []int64{1, 2, 3}, []float64{1, 2, 3}, []string{"__name__", "foo"})
it := s.Iterator(nil)
ts := []int64{}
vs := []float64{}
st := []int64{}
for it.Next() == chunkenc.ValFloat {
t, v := it.At()
ts = append(ts, t)
vs = append(vs, v)
st = append(st, it.AtST())
}
require.Equal(t, []int64{1, 2, 3}, ts)
require.Equal(t, []float64{1, 2, 3}, vs)
require.Equal(t, []int64{0, 1, 2}, st)
}

View file

@ -599,6 +599,13 @@ func (c *chainSampleIterator) AtT() int64 {
return c.curr.AtT()
}
func (c *chainSampleIterator) AtST() int64 {
if c.curr == nil {
panic("chainSampleIterator.AtST called before first .Next or after .Next returned false.")
}
return c.curr.AtST()
}
func (c *chainSampleIterator) Next() chunkenc.ValueType {
var (
currT int64

View file

@ -66,116 +66,116 @@ func TestMergeQuerierWithChainMerger(t *testing.T) {
{
name: "one querier, two series",
querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}),
}},
expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}),
),
},
{
name: "two queriers, one different series each",
querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}),
}, {
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}),
}},
expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}),
),
},
{
name: "two time unsorted queriers, two series each",
querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}, fSample{0, 6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}),
}, {
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}, fSample{0, 4, 4}}),
}},
expected: NewMockSeriesSet(
NewListSeries(
labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}, fSample{0, 6, 6}},
),
NewListSeries(
labels.FromStrings("foo", "bar"),
[]chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}},
),
),
},
{
name: "five queriers, only two queriers have two time unsorted series each",
querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}, fSample{0, 6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}),
}, {
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}, fSample{0, 4, 4}}),
}, {}},
expected: NewMockSeriesSet(
NewListSeries(
labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}, fSample{0, 6, 6}},
),
NewListSeries(
labels.FromStrings("foo", "bar"),
[]chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}},
),
),
},
{
name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together",
querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}, fSample{0, 6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}),
}, {
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}, fSample{0, 4, 4}}),
}, {}},
extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()},
expected: NewMockSeriesSet(
NewListSeries(
labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}, fSample{0, 6, 6}},
),
NewListSeries(
labels.FromStrings("foo", "bar"),
[]chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}},
),
),
},
{
name: "two queriers, with two series, one is overlapping",
querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 21}, fSample{3, 31}, fSample{5, 5}, fSample{6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 21}, fSample{0, 3, 31}, fSample{0, 5, 5}, fSample{0, 6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}),
}, {
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 22}, fSample{3, 32}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}),
NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 22}, fSample{0, 3, 32}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}, fSample{0, 4, 4}}),
}, {}},
expected: NewMockSeriesSet(
NewListSeries(
labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 21}, fSample{3, 31}, fSample{5, 5}, fSample{6, 6}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 21}, fSample{0, 3, 31}, fSample{0, 5, 5}, fSample{0, 6, 6}},
),
NewListSeries(
labels.FromStrings("foo", "bar"),
[]chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}},
),
),
},
{
name: "two queries, one with NaN samples series",
querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, math.NaN()}}),
}, {
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{1, 1}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 1, 1}}),
}},
expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}, fSample{1, 1}}),
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, math.NaN()}, fSample{0, 1, 1}}),
),
},
} {
@ -249,108 +249,108 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) {
{
name: "one querier, two series",
chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}),
}},
expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}),
),
},
{
name: "two secondaries, one different series each",
chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
}, {
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}),
}},
expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}),
),
},
{
name: "two secondaries, two not in time order series each",
chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}),
}, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}}, []chunks.Sample{fSample{4, 4}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}}, []chunks.Sample{fSample{0, 4, 4}}),
}},
expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}},
[]chunks.Sample{fSample{3, 3}},
[]chunks.Sample{fSample{5, 5}},
[]chunks.Sample{fSample{6, 6}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}},
[]chunks.Sample{fSample{0, 3, 3}},
[]chunks.Sample{fSample{0, 5, 5}},
[]chunks.Sample{fSample{0, 6, 6}},
),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]chunks.Sample{fSample{0, 0}, fSample{1, 1}},
[]chunks.Sample{fSample{2, 2}},
[]chunks.Sample{fSample{3, 3}},
[]chunks.Sample{fSample{4, 4}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}},
[]chunks.Sample{fSample{0, 2, 2}},
[]chunks.Sample{fSample{0, 3, 3}},
[]chunks.Sample{fSample{0, 4, 4}},
),
),
},
{
name: "five secondaries, only two have two not in time order series each",
chkQuerierSeries: [][]ChunkSeries{{}, {}, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}),
}, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}}, []chunks.Sample{fSample{4, 4}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}}, []chunks.Sample{fSample{0, 4, 4}}),
}, {}},
expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}},
[]chunks.Sample{fSample{3, 3}},
[]chunks.Sample{fSample{5, 5}},
[]chunks.Sample{fSample{6, 6}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}},
[]chunks.Sample{fSample{0, 3, 3}},
[]chunks.Sample{fSample{0, 5, 5}},
[]chunks.Sample{fSample{0, 6, 6}},
),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]chunks.Sample{fSample{0, 0}, fSample{1, 1}},
[]chunks.Sample{fSample{2, 2}},
[]chunks.Sample{fSample{3, 3}},
[]chunks.Sample{fSample{4, 4}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}},
[]chunks.Sample{fSample{0, 2, 2}},
[]chunks.Sample{fSample{0, 3, 3}},
[]chunks.Sample{fSample{0, 4, 4}},
),
),
},
{
name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together",
chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}),
}, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}}, []chunks.Sample{fSample{4, 4}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}}, []chunks.Sample{fSample{0, 4, 4}}),
}},
extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()},
expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}},
[]chunks.Sample{fSample{3, 3}},
[]chunks.Sample{fSample{5, 5}},
[]chunks.Sample{fSample{6, 6}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}},
[]chunks.Sample{fSample{0, 3, 3}},
[]chunks.Sample{fSample{0, 5, 5}},
[]chunks.Sample{fSample{0, 6, 6}},
),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]chunks.Sample{fSample{0, 0}, fSample{1, 1}},
[]chunks.Sample{fSample{2, 2}},
[]chunks.Sample{fSample{3, 3}},
[]chunks.Sample{fSample{4, 4}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}},
[]chunks.Sample{fSample{0, 2, 2}},
[]chunks.Sample{fSample{0, 3, 3}},
[]chunks.Sample{fSample{0, 4, 4}},
),
),
},
{
name: "two queries, one with NaN samples series",
chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, math.NaN()}}),
}, {
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{1, 1}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 1, 1}}),
}},
expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}, []chunks.Sample{fSample{1, 1}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, math.NaN()}}, []chunks.Sample{fSample{0, 1, 1}}),
),
},
} {
@ -387,13 +387,13 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) {
func histogramSample(ts int64, hint histogram.CounterResetHint) hSample {
h := tsdbutil.GenerateTestHistogram(ts + 1)
h.CounterResetHint = hint
return hSample{t: ts, h: h}
return hSample{st: -ts, t: ts, h: h}
}
func floatHistogramSample(ts int64, hint histogram.CounterResetHint) fhSample {
fh := tsdbutil.GenerateTestFloatHistogram(ts + 1)
fh.CounterResetHint = hint
return fhSample{t: ts, fh: fh}
return fhSample{st: -ts, t: ts, fh: fh}
}
// Shorthands for counter reset hints.
@ -431,9 +431,9 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
{
name: "single series",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
},
{
name: "two empty series",
@ -446,55 +446,55 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
{
name: "two non overlapping",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}, []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}),
},
{
name: "two overlapping",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 8, 8}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{7, 7}, fSample{8, 8}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 7, 7}, fSample{0, 8, 8}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}),
},
{
name: "two duplicated",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
},
{
name: "three overlapping",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{4, 4}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 4, 4}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}, fSample{6, 6}}),
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}, fSample{0, 5, 5}, fSample{0, 6, 6}}),
},
{
name: "three in chained overlap",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{4, 4}, fSample{6, 66}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{6, 6}, fSample{10, 10}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 4, 4}, fSample{0, 6, 66}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 6, 6}, fSample{0, 10, 10}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}, fSample{6, 66}, fSample{10, 10}}),
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}, fSample{0, 5, 5}, fSample{0, 6, 66}, fSample{0, 10, 10}}),
},
{
name: "three in chained overlap complex",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 10, 10}, fSample{0, 15, 15}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 20, 20}}, []chunks.Sample{fSample{0, 25, 25}, fSample{0, 30, 30}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 18, 18}, fSample{0, 26, 26}}, []chunks.Sample{fSample{0, 31, 31}, fSample{0, 35, 35}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{0, 0}, fSample{2, 2}, fSample{5, 5}, fSample{10, 10}, fSample{15, 15}, fSample{18, 18}, fSample{20, 20}, fSample{25, 25}, fSample{26, 26}, fSample{30, 30}},
[]chunks.Sample{fSample{31, 31}, fSample{35, 35}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 2, 2}, fSample{0, 5, 5}, fSample{0, 10, 10}, fSample{0, 15, 15}, fSample{0, 18, 18}, fSample{0, 20, 20}, fSample{0, 25, 25}, fSample{0, 26, 26}, fSample{0, 30, 30}},
[]chunks.Sample{fSample{0, 31, 31}, fSample{0, 35, 35}},
),
},
{
@ -534,13 +534,13 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
name: "histogram chunks overlapping with float chunks",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{histogramSample(0), histogramSample(5)}, []chunks.Sample{histogramSample(10), histogramSample(15)}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{12, 12}}, []chunks.Sample{fSample{14, 14}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 12, 12}}, []chunks.Sample{fSample{0, 14, 14}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{histogramSample(0)},
[]chunks.Sample{fSample{1, 1}},
[]chunks.Sample{fSample{0, 1, 1}},
[]chunks.Sample{histogramSample(5), histogramSample(10)},
[]chunks.Sample{fSample{12, 12}, fSample{14, 14}},
[]chunks.Sample{fSample{0, 12, 12}, fSample{0, 14, 14}},
[]chunks.Sample{histogramSample(15)},
),
},
@ -560,13 +560,13 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
name: "float histogram chunks overlapping with float chunks",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []chunks.Sample{floatHistogramSample(10), floatHistogramSample(15)}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{12, 12}}, []chunks.Sample{fSample{14, 14}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 12, 12}}, []chunks.Sample{fSample{0, 14, 14}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{floatHistogramSample(0)},
[]chunks.Sample{fSample{1, 1}},
[]chunks.Sample{fSample{0, 1, 1}},
[]chunks.Sample{floatHistogramSample(5), floatHistogramSample(10)},
[]chunks.Sample{fSample{12, 12}, fSample{14, 14}},
[]chunks.Sample{fSample{0, 12, 12}, fSample{0, 14, 14}},
[]chunks.Sample{floatHistogramSample(15)},
),
},
@ -736,9 +736,9 @@ func TestConcatenatingChunkSeriesMerger(t *testing.T) {
{
name: "single series",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}),
},
{
name: "two empty series",
@ -751,70 +751,70 @@ func TestConcatenatingChunkSeriesMerger(t *testing.T) {
{
name: "two non overlapping",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}, []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}),
},
{
name: "two overlapping",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 8, 8}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}},
[]chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 8, 8}},
[]chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}},
),
},
{
name: "two duplicated",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
[]chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}},
[]chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}},
),
},
{
name: "three overlapping",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{4, 4}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 4, 4}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
[]chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}},
[]chunks.Sample{fSample{0, 0}, fSample{4, 4}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}},
[]chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 6, 6}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 4, 4}},
),
},
{
name: "three in chained overlap",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{4, 4}, fSample{6, 66}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{6, 6}, fSample{10, 10}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 4, 4}, fSample{0, 6, 66}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 6, 6}, fSample{0, 10, 10}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
[]chunks.Sample{fSample{4, 4}, fSample{6, 66}},
[]chunks.Sample{fSample{6, 6}, fSample{10, 10}},
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}},
[]chunks.Sample{fSample{0, 4, 4}, fSample{0, 6, 66}},
[]chunks.Sample{fSample{0, 6, 6}, fSample{0, 10, 10}},
),
},
{
name: "three in chained overlap complex",
input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 10, 10}, fSample{0, 15, 15}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 20, 20}}, []chunks.Sample{fSample{0, 25, 25}, fSample{0, 30, 30}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 18, 18}, fSample{0, 26, 26}}, []chunks.Sample{fSample{0, 31, 31}, fSample{0, 35, 35}}),
},
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}},
[]chunks.Sample{fSample{2, 2}, fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}},
[]chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}},
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 10, 10}, fSample{0, 15, 15}},
[]chunks.Sample{fSample{0, 2, 2}, fSample{0, 20, 20}}, []chunks.Sample{fSample{0, 25, 25}, fSample{0, 30, 30}},
[]chunks.Sample{fSample{0, 18, 18}, fSample{0, 26, 26}}, []chunks.Sample{fSample{0, 31, 31}, fSample{0, 35, 35}},
),
},
{
@ -1059,7 +1059,7 @@ func (*mockChunkSeriesSet) Warnings() annotations.Annotations { return nil }
func TestChainSampleIterator(t *testing.T) {
for sampleType, sampleFunc := range map[string]func(int64) chunks.Sample{
"float": func(ts int64) chunks.Sample { return fSample{ts, float64(ts)} },
"float": func(ts int64) chunks.Sample { return fSample{-ts, ts, float64(ts)} },
"histogram": func(ts int64) chunks.Sample { return histogramSample(ts, uk) },
"float histogram": func(ts int64) chunks.Sample { return floatHistogramSample(ts, uk) },
} {
@ -1176,7 +1176,7 @@ func TestChainSampleIteratorHistogramCounterResetHint(t *testing.T) {
func TestChainSampleIteratorSeek(t *testing.T) {
for sampleType, sampleFunc := range map[string]func(int64) chunks.Sample{
"float": func(ts int64) chunks.Sample { return fSample{ts, float64(ts)} },
"float": func(ts int64) chunks.Sample { return fSample{-ts, ts, float64(ts)} },
"histogram": func(ts int64) chunks.Sample { return histogramSample(ts, uk) },
"float histogram": func(ts int64) chunks.Sample { return floatHistogramSample(ts, uk) },
} {
@ -1224,13 +1224,13 @@ func TestChainSampleIteratorSeek(t *testing.T) {
switch merged.Seek(tc.seek) {
case chunkenc.ValFloat:
t, f := merged.At()
actual = append(actual, fSample{t, f})
actual = append(actual, fSample{merged.AtST(), t, f})
case chunkenc.ValHistogram:
t, h := merged.AtHistogram(nil)
actual = append(actual, hSample{t, h})
actual = append(actual, hSample{merged.AtST(), t, h})
case chunkenc.ValFloatHistogram:
t, fh := merged.AtFloatHistogram(nil)
actual = append(actual, fhSample{t, fh})
actual = append(actual, fhSample{merged.AtST(), t, fh})
}
s, err := ExpandSamples(merged, nil)
require.NoError(t, err)
@ -1243,7 +1243,7 @@ func TestChainSampleIteratorSeek(t *testing.T) {
func TestChainSampleIteratorSeekFailingIterator(t *testing.T) {
merged := ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{
NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}),
NewListSeriesIterator(samples{fSample{0, 0, 0.1}, fSample{0, 1, 1.1}, fSample{0, 2, 2.1}}),
errIterator{errors.New("something went wrong")},
})
@ -1253,7 +1253,7 @@ func TestChainSampleIteratorSeekFailingIterator(t *testing.T) {
func TestChainSampleIteratorNextImmediatelyFailingIterator(t *testing.T) {
merged := ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{
NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}),
NewListSeriesIterator(samples{fSample{0, 0, 0.1}, fSample{0, 1, 1.1}, fSample{0, 2, 2.1}}),
errIterator{errors.New("something went wrong")},
})
@ -1263,7 +1263,7 @@ func TestChainSampleIteratorNextImmediatelyFailingIterator(t *testing.T) {
// Next() does some special handling for the first iterator, so make sure it handles the first iterator returning an error too.
merged = ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{
errIterator{errors.New("something went wrong")},
NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}),
NewListSeriesIterator(samples{fSample{0, 0, 0.1}, fSample{0, 1, 1.1}, fSample{0, 2, 2.1}}),
})
require.Equal(t, chunkenc.ValNone, merged.Next())
@ -1310,13 +1310,13 @@ func TestChainSampleIteratorSeekHistogramCounterResetHint(t *testing.T) {
switch merged.Seek(tc.seek) {
case chunkenc.ValFloat:
t, f := merged.At()
actual = append(actual, fSample{t, f})
actual = append(actual, fSample{merged.AtST(), t, f})
case chunkenc.ValHistogram:
t, h := merged.AtHistogram(nil)
actual = append(actual, hSample{t, h})
actual = append(actual, hSample{merged.AtST(), t, h})
case chunkenc.ValFloatHistogram:
t, fh := merged.AtFloatHistogram(nil)
actual = append(actual, fhSample{t, fh})
actual = append(actual, fhSample{merged.AtST(), t, fh})
}
s, err := ExpandSamples(merged, nil)
require.NoError(t, err)
@ -1716,6 +1716,10 @@ func (errIterator) AtT() int64 {
return 0
}
func (errIterator) AtST() int64 {
return 0
}
func (e errIterator) Err() error {
return e.err
}

View file

@ -564,6 +564,12 @@ func (c *concreteSeriesIterator) AtT() int64 {
return c.series.floats[c.floatsCur].Timestamp
}
// TODO(krajorama): maybe implement AtST. concreteSeriesIterator is used
// for turning query results into an iterable, but query results do not have ST.
func (*concreteSeriesIterator) AtST() int64 {
return 0
}
const noTS = int64(math.MaxInt64)
// Next implements chunkenc.Iterator.
@ -832,6 +838,11 @@ func (it *chunkedSeriesIterator) AtT() int64 {
return it.cur.AtT()
}
// TODO(krajorama): test AtST once we have a chunk format that provides ST.
func (it *chunkedSeriesIterator) AtST() int64 {
return it.cur.AtST()
}
func (it *chunkedSeriesIterator) Err() error {
return it.err
}

View file

@ -1146,7 +1146,7 @@ func buildTestChunks(t *testing.T) []prompb.Chunk {
minTimeMs := time
for j := range numSamplesPerTestChunk {
a.Append(time, float64(i+j))
a.Append(0, time, float64(i+j))
time += int64(1000)
}

View file

@ -138,6 +138,11 @@ func (it *listSeriesIterator) AtT() int64 {
return s.T()
}
func (it *listSeriesIterator) AtST() int64 {
s := it.samples.Get(it.idx)
return s.ST()
}
func (it *listSeriesIterator) Next() chunkenc.ValueType {
it.idx++
if it.idx >= it.samples.Len() {
@ -355,18 +360,20 @@ func (s *seriesToChunkEncoder) Iterator(it chunks.Iterator) chunks.Iterator {
lastType = typ
var (
t int64
v float64
h *histogram.Histogram
fh *histogram.FloatHistogram
st, t int64
v float64
h *histogram.Histogram
fh *histogram.FloatHistogram
)
switch typ {
case chunkenc.ValFloat:
t, v = seriesIter.At()
app.Append(t, v)
st = seriesIter.AtST()
app.Append(st, t, v)
case chunkenc.ValHistogram:
t, h = seriesIter.AtHistogram(nil)
newChk, recoded, app, err = app.AppendHistogram(nil, t, h, false)
st = seriesIter.AtST()
newChk, recoded, app, err = app.AppendHistogram(nil, st, t, h, false)
if err != nil {
return errChunksIterator{err: err}
}
@ -381,7 +388,8 @@ func (s *seriesToChunkEncoder) Iterator(it chunks.Iterator) chunks.Iterator {
}
case chunkenc.ValFloatHistogram:
t, fh = seriesIter.AtFloatHistogram(nil)
newChk, recoded, app, err = app.AppendFloatHistogram(nil, t, fh, false)
st = seriesIter.AtST()
newChk, recoded, app, err = app.AppendFloatHistogram(nil, st, t, fh, false)
if err != nil {
return errChunksIterator{err: err}
}
@ -439,16 +447,16 @@ func (e errChunksIterator) Err() error { return e.err }
// ExpandSamples iterates over all samples in the iterator, buffering them all in a slice.
// Optionally it takes a sample constructor, useful when you want to compare sample slices with different
// sample implementations. If nil, the sample type from this package will be used.
func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample) ([]chunks.Sample, error) {
func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(st, t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample) ([]chunks.Sample, error) {
if newSampleFn == nil {
newSampleFn = func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample {
newSampleFn = func(st, t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample {
switch {
case h != nil:
return hSample{t, h}
return hSample{st, t, h}
case fh != nil:
return fhSample{t, fh}
return fhSample{st, t, fh}
default:
return fSample{t, f}
return fSample{st, t, f}
}
}
}
@ -460,17 +468,20 @@ func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64,
return result, iter.Err()
case chunkenc.ValFloat:
t, f := iter.At()
st := iter.AtST()
// NaNs can't be compared normally, so substitute for another value.
if math.IsNaN(f) {
f = -42
}
result = append(result, newSampleFn(t, f, nil, nil))
result = append(result, newSampleFn(st, t, f, nil, nil))
case chunkenc.ValHistogram:
t, h := iter.AtHistogram(nil)
result = append(result, newSampleFn(t, 0, h, nil))
st := iter.AtST()
result = append(result, newSampleFn(st, t, 0, h, nil))
case chunkenc.ValFloatHistogram:
t, fh := iter.AtFloatHistogram(nil)
result = append(result, newSampleFn(t, 0, nil, fh))
st := iter.AtST()
result = append(result, newSampleFn(st, t, 0, nil, fh))
}
}
}
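// Illustrative sketch, not part of this change: how a caller might use the updated
// ExpandSamples signature. Passing nil selects the default sample constructor shown
// above; the helper name expandWithST is an assumption for the example.
func expandWithST(it chunkenc.Iterator) ([]int64, error) {
	out, err := ExpandSamples(it, nil)
	if err != nil {
		return nil, err
	}
	sts := make([]int64, 0, len(out))
	for _, s := range out {
		sts = append(sts, s.ST()) // 0 when the source iterator has no start timestamp
	}
	return sts, nil
}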

View file

@ -28,11 +28,11 @@ import (
func TestListSeriesIterator(t *testing.T) {
it := NewListSeriesIterator(samples{
fSample{0, 0},
fSample{1, 1},
fSample{1, 1.5},
fSample{2, 2},
fSample{3, 3},
fSample{-10, 0, 0},
fSample{-9, 1, 1},
fSample{-8, 1, 1.5},
fSample{-7, 2, 2},
fSample{-6, 3, 3},
})
// Seek to the first sample with ts=1.
@ -40,30 +40,35 @@ func TestListSeriesIterator(t *testing.T) {
ts, v := it.At()
require.Equal(t, int64(1), ts)
require.Equal(t, 1., v)
require.Equal(t, int64(-9), it.AtST())
// Seek one further, next sample still has ts=1.
require.Equal(t, chunkenc.ValFloat, it.Next())
ts, v = it.At()
require.Equal(t, int64(1), ts)
require.Equal(t, 1.5, v)
require.Equal(t, int64(-8), it.AtST())
// Seek again to 1 and make sure we stay where we are.
require.Equal(t, chunkenc.ValFloat, it.Seek(1))
ts, v = it.At()
require.Equal(t, int64(1), ts)
require.Equal(t, 1.5, v)
require.Equal(t, int64(-8), it.AtST())
// Another seek.
require.Equal(t, chunkenc.ValFloat, it.Seek(3))
ts, v = it.At()
require.Equal(t, int64(3), ts)
require.Equal(t, 3., v)
require.Equal(t, int64(-6), it.AtST())
// And we don't go back.
require.Equal(t, chunkenc.ValFloat, it.Seek(2))
ts, v = it.At()
require.Equal(t, int64(3), ts)
require.Equal(t, 3., v)
require.Equal(t, int64(-6), it.AtST())
// Seek beyond the end.
require.Equal(t, chunkenc.ValNone, it.Seek(5))

View file

@ -176,7 +176,7 @@ func TestCorruptedChunk(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
tmpdir := t.TempDir()
series := storage.NewListSeries(labels.FromStrings("a", "b"), []chunks.Sample{sample{1, 1, nil, nil}})
series := storage.NewListSeries(labels.FromStrings("a", "b"), []chunks.Sample{sample{0, 1, 1, nil, nil}})
blockDir := createBlock(t, tmpdir, []storage.Series{series})
files, err := sequenceFiles(chunkDir(blockDir))
require.NoError(t, err)
@ -236,7 +236,7 @@ func TestLabelValuesWithMatchers(t *testing.T) {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"unique", fmt.Sprintf("value%d", i),
), []chunks.Sample{sample{100, 0, nil, nil}}))
), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
blockDir := createBlock(t, tmpdir, seriesEntries)
@ -319,7 +319,7 @@ func TestBlockQuerierReturnsSortedLabelValues(t *testing.T) {
for i := 100; i > 0; i-- {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"__name__", fmt.Sprintf("value%d", i),
), []chunks.Sample{sample{100, 0, nil, nil}}))
), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
blockDir := createBlock(t, tmpdir, seriesEntries)
@ -436,7 +436,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) {
"a_unique", fmt.Sprintf("value%d", i),
"b_tens", fmt.Sprintf("value%d", i/(metricCount/10)),
"c_ninety", fmt.Sprintf("value%d", i/(metricCount/10)/9), // "0" for the first 90%, then "1"
), []chunks.Sample{sample{100, 0, nil, nil}}))
), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
blockDir := createBlock(b, tmpdir, seriesEntries)
@ -472,13 +472,13 @@ func TestLabelNamesWithMatchers(t *testing.T) {
for i := range 100 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"unique", fmt.Sprintf("value%d", i),
), []chunks.Sample{sample{100, 0, nil, nil}}))
), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
if i%10 == 0 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"unique", fmt.Sprintf("value%d", i),
), []chunks.Sample{sample{100, 0, nil, nil}}))
), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
if i%20 == 0 {
@ -486,7 +486,7 @@ func TestLabelNamesWithMatchers(t *testing.T) {
"tens", fmt.Sprintf("value%d", i/10),
"twenties", fmt.Sprintf("value%d", i/20),
"unique", fmt.Sprintf("value%d", i),
), []chunks.Sample{sample{100, 0, nil, nil}}))
), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
}
@ -542,7 +542,7 @@ func TestBlockIndexReader_PostingsForLabelMatching(t *testing.T) {
testPostingsForLabelMatching(t, 2, func(t *testing.T, series []labels.Labels) IndexReader {
var seriesEntries []storage.Series
for _, s := range series {
seriesEntries = append(seriesEntries, storage.NewListSeries(s, []chunks.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(s, []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
blockDir := createBlock(t, t.TempDir(), seriesEntries)

View file

@ -99,9 +99,9 @@ type Iterable interface {
Iterator(Iterator) Iterator
}
// Appender adds sample pairs to a chunk.
// Appender adds samples with a start timestamp, timestamp, and value to a chunk.
type Appender interface {
Append(int64, float64)
Append(st, t int64, v float64)
// AppendHistogram and AppendFloatHistogram append a histogram sample to a histogram or float histogram chunk.
// Appending a histogram may require creating a completely new chunk or recoding (changing) the current chunk.
@ -114,8 +114,8 @@ type Appender interface {
// The returned bool isRecoded can be used to distinguish between the new Chunk c being a completely new Chunk
// or the current Chunk recoded to a new Chunk.
// The Appender app that can be used for the next append is always returned.
AppendHistogram(prev *HistogramAppender, t int64, h *histogram.Histogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error)
AppendFloatHistogram(prev *FloatHistogramAppender, t int64, h *histogram.FloatHistogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error)
AppendHistogram(prev *HistogramAppender, st, t int64, h *histogram.Histogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error)
AppendFloatHistogram(prev *FloatHistogramAppender, st, t int64, h *histogram.FloatHistogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error)
}
// Iterator is a simple iterator that can only get the next value.
@ -151,6 +151,10 @@ type Iterator interface {
// AtT returns the current timestamp.
// Before the iterator has advanced, the behaviour is unspecified.
AtT() int64
// AtST returns the current start timestamp.
// It returns 0 if start timestamps are not implemented or not set.
// Before the iterator has advanced, the behaviour is unspecified.
AtST() int64
// Err returns the current error. It should be used only after the
// iterator is exhausted, i.e. `Next` or `Seek` have returned ValNone.
Err() error
@ -208,25 +212,30 @@ func (v ValueType) NewChunk() (Chunk, error) {
}
}
// MockSeriesIterator returns an iterator for a mock series with custom timeStamps and values.
func MockSeriesIterator(timestamps []int64, values []float64) Iterator {
// MockSeriesIterator returns an iterator for a mock series with custom
// start timestamps, timestamps, and values.
// Start timestamps are optional; pass nil or an empty slice to indicate no
// start timestamps.
func MockSeriesIterator(startTimestamps, timestamps []int64, values []float64) Iterator {
return &mockSeriesIterator{
timeStamps: timestamps,
values: values,
currIndex: -1,
startTimestamps: startTimestamps,
timestamps: timestamps,
values: values,
currIndex: -1,
}
}
type mockSeriesIterator struct {
timeStamps []int64
values []float64
currIndex int
timestamps []int64
startTimestamps []int64
values []float64
currIndex int
}
func (*mockSeriesIterator) Seek(int64) ValueType { return ValNone }
func (it *mockSeriesIterator) At() (int64, float64) {
return it.timeStamps[it.currIndex], it.values[it.currIndex]
return it.timestamps[it.currIndex], it.values[it.currIndex]
}
func (*mockSeriesIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogram) {
@ -238,11 +247,18 @@ func (*mockSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *
}
func (it *mockSeriesIterator) AtT() int64 {
return it.timeStamps[it.currIndex]
return it.timestamps[it.currIndex]
}
func (it *mockSeriesIterator) AtST() int64 {
if len(it.startTimestamps) == 0 {
return 0
}
return it.startTimestamps[it.currIndex]
}
func (it *mockSeriesIterator) Next() ValueType {
if it.currIndex < len(it.timeStamps)-1 {
if it.currIndex < len(it.timestamps)-1 {
it.currIndex++
return ValFloat
}
@ -268,8 +284,9 @@ func (nopIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogra
func (nopIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *histogram.FloatHistogram) {
return math.MinInt64, nil
}
func (nopIterator) AtT() int64 { return math.MinInt64 }
func (nopIterator) Err() error { return nil }
func (nopIterator) AtT() int64 { return math.MinInt64 }
func (nopIterator) AtST() int64 { return 0 }
func (nopIterator) Err() error { return nil }
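// Illustrative sketch, not part of this change: appending float samples with the
// new three-argument Append and reading the start timestamp back via AtST. The
// helper name xorRoundTrip is an assumption for the example.
func xorRoundTrip() ([]int64, error) {
	c := NewXORChunk()
	app, err := c.Appender()
	if err != nil {
		return nil, err
	}
	app.Append(0, 1000, 1.5) // st=0: XOR chunks do not store start timestamps yet
	app.Append(0, 2000, 2.5)

	var sts []int64
	it := c.Iterator(nil)
	for it.Next() == ValFloat {
		sts = append(sts, it.AtST()) // always 0 for XOR chunks in this change
	}
	return sts, it.Err()
}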
// Pool is used to create and reuse chunk references to avoid allocations.
type Pool interface {

View file

@ -65,7 +65,7 @@ func testChunk(t *testing.T, c Chunk) {
require.NoError(t, err)
}
app.Append(ts, v)
app.Append(0, ts, v)
exp = append(exp, pair{t: ts, v: v})
}
@ -226,7 +226,7 @@ func benchmarkIterator(b *testing.B, newChunk func() Chunk) {
if j > 250 {
break
}
a.Append(p.t, p.v)
a.Append(0, p.t, p.v)
j++
}
}
@ -303,7 +303,7 @@ func benchmarkAppender(b *testing.B, deltas func() (int64, float64), newChunk fu
b.Fatalf("get appender: %s", err)
}
for _, p := range exp {
a.Append(p.t, p.v)
a.Append(0, p.t, p.v)
}
}
}

View file

@ -195,7 +195,7 @@ func (a *FloatHistogramAppender) NumSamples() int {
// Append implements Appender. This implementation panics because normal float
// samples must never be appended to a histogram chunk.
func (*FloatHistogramAppender) Append(int64, float64) {
func (*FloatHistogramAppender) Append(int64, int64, float64) {
panic("appended a float sample to a histogram chunk")
}
@ -682,11 +682,11 @@ func (*FloatHistogramAppender) recodeHistogram(
}
}
func (*FloatHistogramAppender) AppendHistogram(*HistogramAppender, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) {
func (*FloatHistogramAppender) AppendHistogram(*HistogramAppender, int64, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) {
panic("appended a histogram sample to a float histogram chunk")
}
func (a *FloatHistogramAppender) AppendFloatHistogram(prev *FloatHistogramAppender, t int64, h *histogram.FloatHistogram, appendOnly bool) (Chunk, bool, Appender, error) {
func (a *FloatHistogramAppender) AppendFloatHistogram(prev *FloatHistogramAppender, _, t int64, h *histogram.FloatHistogram, appendOnly bool) (Chunk, bool, Appender, error) {
if a.NumSamples() == 0 {
a.appendFloatHistogram(t, h)
if h.CounterResetHint == histogram.GaugeType {
@ -938,6 +938,10 @@ func (it *floatHistogramIterator) AtT() int64 {
return it.t
}
func (*floatHistogramIterator) AtST() int64 {
return 0
}
func (it *floatHistogramIterator) Err() error {
return it.err
}

View file

@ -63,7 +63,7 @@ func TestFirstFloatHistogramExplicitCounterReset(t *testing.T) {
chk := NewFloatHistogramChunk()
app, err := chk.Appender()
require.NoError(t, err)
newChk, recoded, newApp, err := app.AppendFloatHistogram(nil, 0, h, false)
newChk, recoded, newApp, err := app.AppendFloatHistogram(nil, 0, 0, h, false)
require.NoError(t, err)
require.Nil(t, newChk)
require.False(t, recoded)
@ -101,7 +101,7 @@ func TestFloatHistogramChunkSameBuckets(t *testing.T) {
},
NegativeBuckets: []int64{2, 1, -1, -1}, // counts: 2, 3, 2, 1 (total 8)
}
chk, _, app, err := app.AppendFloatHistogram(nil, ts, h.ToFloat(nil), false)
chk, _, app, err := app.AppendFloatHistogram(nil, 0, ts, h.ToFloat(nil), false)
require.NoError(t, err)
require.Nil(t, chk)
exp = append(exp, floatResult{t: ts, h: h.ToFloat(nil)})
@ -115,7 +115,7 @@ func TestFloatHistogramChunkSameBuckets(t *testing.T) {
h.Sum = 24.4
h.PositiveBuckets = []int64{5, -2, 1, -2} // counts: 5, 3, 4, 2 (total 14)
h.NegativeBuckets = []int64{4, -1, 1, -1} // counts: 4, 3, 4, 4 (total 15)
chk, _, _, err = app.AppendFloatHistogram(nil, ts, h.ToFloat(nil), false)
chk, _, _, err = app.AppendFloatHistogram(nil, 0, ts, h.ToFloat(nil), false)
require.NoError(t, err)
require.Nil(t, chk)
expH := h.ToFloat(nil)
@ -134,7 +134,7 @@ func TestFloatHistogramChunkSameBuckets(t *testing.T) {
h.Sum = 24.4
h.PositiveBuckets = []int64{6, 1, -3, 6} // counts: 6, 7, 4, 10 (total 27)
h.NegativeBuckets = []int64{5, 1, -2, 3} // counts: 5, 6, 4, 7 (total 22)
chk, _, _, err = app.AppendFloatHistogram(nil, ts, h.ToFloat(nil), false)
chk, _, _, err = app.AppendFloatHistogram(nil, 0, ts, h.ToFloat(nil), false)
require.NoError(t, err)
require.Nil(t, chk)
expH = h.ToFloat(nil)
@ -224,7 +224,7 @@ func TestFloatHistogramChunkBucketChanges(t *testing.T) {
NegativeBuckets: []int64{1},
}
chk, _, app, err := app.AppendFloatHistogram(nil, ts1, h1.ToFloat(nil), false)
chk, _, app, err := app.AppendFloatHistogram(nil, 0, ts1, h1.ToFloat(nil), false)
require.NoError(t, err)
require.Nil(t, chk)
require.Equal(t, 1, c.NumSamples())
@ -260,7 +260,7 @@ func TestFloatHistogramChunkBucketChanges(t *testing.T) {
require.True(t, ok) // Only new buckets came in.
require.False(t, cr)
c, app = hApp.recode(posInterjections, negInterjections, h2.PositiveSpans, h2.NegativeSpans)
chk, _, _, err = app.AppendFloatHistogram(nil, ts2, h2.ToFloat(nil), false)
chk, _, _, err = app.AppendFloatHistogram(nil, 0, ts2, h2.ToFloat(nil), false)
require.NoError(t, err)
require.Nil(t, chk)
require.Equal(t, 2, c.NumSamples())
@ -330,7 +330,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) {
ts := int64(1234567890)
chk, _, app, err := app.AppendFloatHistogram(nil, ts, h.Copy(), false)
chk, _, app, err := app.AppendFloatHistogram(nil, 0, ts, h.Copy(), false)
require.NoError(t, err)
require.Nil(t, chk)
require.Equal(t, 1, c.NumSamples())
@ -557,7 +557,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) {
nextChunk := NewFloatHistogramChunk()
app, err := nextChunk.Appender()
require.NoError(t, err)
newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, ts+1, h2, false)
newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, 0, ts+1, h2, false)
require.NoError(t, err)
require.Nil(t, newChunk)
require.False(t, recoded)
@ -575,7 +575,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) {
nextChunk := NewFloatHistogramChunk()
app, err := nextChunk.Appender()
require.NoError(t, err)
newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, ts+1, h2, false)
newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, 0, ts+1, h2, false)
require.NoError(t, err)
require.Nil(t, newChunk)
require.False(t, recoded)
@ -602,7 +602,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) {
nextChunk := NewFloatHistogramChunk()
app, err := nextChunk.Appender()
require.NoError(t, err)
newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, ts+1, h2, false)
newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, 0, ts+1, h2, false)
require.NoError(t, err)
require.Nil(t, newChunk)
require.False(t, recoded)
@ -717,7 +717,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) {
func assertNewFloatHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *FloatHistogramAppender, ts int64, h *histogram.FloatHistogram, expectHeader CounterResetHeader, expectHint histogram.CounterResetHint) {
oldChunkBytes := oldChunk.Bytes()
newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, ts, h, false)
newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, 0, ts, h, false)
require.Equal(t, oldChunkBytes, oldChunk.Bytes()) // Sanity check that previous chunk is untouched.
require.NoError(t, err)
require.NotNil(t, newChunk)
@ -732,7 +732,7 @@ func assertNewFloatHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *Fl
func assertNoNewFloatHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *FloatHistogramAppender, ts int64, h *histogram.FloatHistogram, expectHeader CounterResetHeader) {
oldChunkBytes := oldChunk.Bytes()
newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, ts, h, false)
newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, 0, ts, h, false)
require.Greater(t, len(oldChunk.Bytes()), len(oldChunkBytes)) // Check that current chunk is bigger than previously.
require.NoError(t, err)
require.Nil(t, newChunk)
@ -745,7 +745,7 @@ func assertNoNewFloatHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *
func assertRecodedFloatHistogramChunkOnAppend(t *testing.T, prevChunk Chunk, hApp *FloatHistogramAppender, ts int64, h *histogram.FloatHistogram, expectHeader CounterResetHeader) {
prevChunkBytes := prevChunk.Bytes()
newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, ts, h, false)
newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, 0, ts, h, false)
require.Equal(t, prevChunkBytes, prevChunk.Bytes()) // Sanity check that previous chunk is untouched. This may change in the future if we implement in-place recoding.
require.NoError(t, err)
require.NotNil(t, newChunk)
@ -959,7 +959,7 @@ func TestFloatHistogramChunkAppendableWithEmptySpan(t *testing.T) {
require.NoError(t, err)
require.Equal(t, 0, c.NumSamples())
_, _, _, err = app.AppendFloatHistogram(nil, 1, tc.h1, true)
_, _, _, err = app.AppendFloatHistogram(nil, 0, 1, tc.h1, true)
require.NoError(t, err)
require.Equal(t, 1, c.NumSamples())
hApp, _ := app.(*FloatHistogramAppender)
@ -1019,7 +1019,7 @@ func TestFloatHistogramChunkAppendableGauge(t *testing.T) {
ts := int64(1234567890)
chk, _, app, err := app.AppendFloatHistogram(nil, ts, h.Copy(), false)
chk, _, app, err := app.AppendFloatHistogram(nil, 0, ts, h.Copy(), false)
require.NoError(t, err)
require.Nil(t, chk)
require.Equal(t, 1, c.NumSamples())
@ -1259,7 +1259,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) {
h := tsdbutil.GenerateTestFloatHistogram(0)
var isRecoded bool
c, isRecoded, app, err = app.AppendFloatHistogram(nil, 1, h, true)
c, isRecoded, app, err = app.AppendFloatHistogram(nil, 0, 1, h, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.NoError(t, err)
@ -1267,7 +1267,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) {
// Add erroring histogram.
h2 := h.Copy()
h2.Schema++
c, isRecoded, _, err = app.AppendFloatHistogram(nil, 2, h2, true)
c, isRecoded, _, err = app.AppendFloatHistogram(nil, 0, 2, h2, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.EqualError(t, err, "float histogram schema change")
@ -1281,7 +1281,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) {
h := tsdbutil.GenerateTestFloatHistogram(0)
var isRecoded bool
c, isRecoded, app, err = app.AppendFloatHistogram(nil, 1, h, true)
c, isRecoded, app, err = app.AppendFloatHistogram(nil, 0, 1, h, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.NoError(t, err)
@ -1289,7 +1289,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) {
// Add erroring histogram.
h2 := h.Copy()
h2.CounterResetHint = histogram.CounterReset
c, isRecoded, _, err = app.AppendFloatHistogram(nil, 2, h2, true)
c, isRecoded, _, err = app.AppendFloatHistogram(nil, 0, 2, h2, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.EqualError(t, err, "float histogram counter reset")
@ -1303,7 +1303,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) {
h := tsdbutil.GenerateTestCustomBucketsFloatHistogram(0)
var isRecoded bool
c, isRecoded, app, err = app.AppendFloatHistogram(nil, 1, h, true)
c, isRecoded, app, err = app.AppendFloatHistogram(nil, 0, 1, h, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.NoError(t, err)
@ -1311,7 +1311,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) {
// Add erroring histogram.
h2 := h.Copy()
h2.CustomValues = []float64{0, 1, 2, 3, 4, 5, 6, 7}
c, isRecoded, _, err = app.AppendFloatHistogram(nil, 2, h2, true)
c, isRecoded, _, err = app.AppendFloatHistogram(nil, 0, 2, h2, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.EqualError(t, err, "float histogram counter reset")
@ -1344,10 +1344,10 @@ func TestFloatHistogramUniqueSpansAfterNext(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendFloatHistogram(nil, 0, h1, false)
_, _, _, err = app.AppendFloatHistogram(nil, 0, 0, h1, false)
require.NoError(t, err)
_, _, _, err = app.AppendFloatHistogram(nil, 1, h2, false)
_, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h2, false)
require.NoError(t, err)
// Create an iterator and advance to the first histogram.
@ -1390,10 +1390,10 @@ func TestFloatHistogramUniqueCustomValuesAfterNext(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendFloatHistogram(nil, 0, h1, false)
_, _, _, err = app.AppendFloatHistogram(nil, 0, 0, h1, false)
require.NoError(t, err)
_, _, _, err = app.AppendFloatHistogram(nil, 1, h2, false)
_, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h2, false)
require.NoError(t, err)
// Create an iterator and advance to the first histogram.
@ -1435,7 +1435,7 @@ func TestFloatHistogramEmptyBucketsWithGaps(t *testing.T) {
c := NewFloatHistogramChunk()
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendFloatHistogram(nil, 1, h1, false)
_, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h1, false)
require.NoError(t, err)
h2 := &histogram.FloatHistogram{
@ -1448,7 +1448,7 @@ func TestFloatHistogramEmptyBucketsWithGaps(t *testing.T) {
}
require.NoError(t, h2.Validate())
newC, recoded, _, err := app.AppendFloatHistogram(nil, 2, h2, false)
newC, recoded, _, err := app.AppendFloatHistogram(nil, 0, 2, h2, false)
require.NoError(t, err)
require.True(t, recoded)
require.NotNil(t, newC)
@ -1483,7 +1483,7 @@ func TestFloatHistogramIteratorFailIfSchemaInValid(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendFloatHistogram(nil, 1, h, false)
_, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h, false)
require.NoError(t, err)
it := c.Iterator(nil)
@ -1512,7 +1512,7 @@ func TestFloatHistogramIteratorReduceSchema(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendFloatHistogram(nil, 1, h, false)
_, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h, false)
require.NoError(t, err)
it := c.Iterator(nil)

View file

@ -219,7 +219,7 @@ func (a *HistogramAppender) NumSamples() int {
// Append implements Appender. This implementation panics because normal float
// samples must never be appended to a histogram chunk.
func (*HistogramAppender) Append(int64, float64) {
func (*HistogramAppender) Append(int64, int64, float64) {
panic("appended a float sample to a histogram chunk")
}
@ -734,11 +734,11 @@ func (a *HistogramAppender) writeSumDelta(v float64) {
xorWrite(a.b, v, a.sum, &a.leading, &a.trailing)
}
func (*HistogramAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) {
func (*HistogramAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) {
panic("appended a float histogram sample to a histogram chunk")
}
func (a *HistogramAppender) AppendHistogram(prev *HistogramAppender, t int64, h *histogram.Histogram, appendOnly bool) (Chunk, bool, Appender, error) {
func (a *HistogramAppender) AppendHistogram(prev *HistogramAppender, _, t int64, h *histogram.Histogram, appendOnly bool) (Chunk, bool, Appender, error) {
if a.NumSamples() == 0 {
a.appendHistogram(t, h)
if h.CounterResetHint == histogram.GaugeType {
@ -1075,6 +1075,10 @@ func (it *histogramIterator) AtT() int64 {
return it.t
}
func (*histogramIterator) AtST() int64 {
return 0
}
func (it *histogramIterator) Err() error {
return it.err
}
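// Illustrative sketch, not part of this change: the extended AppendHistogram call
// with an explicit (zero) start timestamp, mirroring the tests in this commit.
// The helper name appendOneHistogram is an assumption for the example.
func appendOneHistogram(h *histogram.Histogram) error {
	c := NewHistogramChunk()
	app, err := c.Appender()
	if err != nil {
		return err
	}
	// st=0 until a chunk format can persist start timestamps (AtST returns 0 above).
	_, _, _, err = app.AppendHistogram(nil, 0, 1000, h, false)
	return err
}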

View file

@ -64,7 +64,7 @@ func TestFirstHistogramExplicitCounterReset(t *testing.T) {
chk := NewHistogramChunk()
app, err := chk.Appender()
require.NoError(t, err)
newChk, recoded, newApp, err := app.AppendHistogram(nil, 0, h, false)
newChk, recoded, newApp, err := app.AppendHistogram(nil, 0, 0, h, false)
require.NoError(t, err)
require.Nil(t, newChk)
require.False(t, recoded)
@ -102,7 +102,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) {
},
NegativeBuckets: []int64{2, 1, -1, -1}, // counts: 2, 3, 2, 1 (total 8)
}
chk, _, app, err := app.AppendHistogram(nil, ts, h, false)
chk, _, app, err := app.AppendHistogram(nil, 0, ts, h, false)
require.NoError(t, err)
require.Nil(t, chk)
exp = append(exp, result{t: ts, h: h, fh: h.ToFloat(nil)})
@ -116,7 +116,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) {
h.Sum = 24.4
h.PositiveBuckets = []int64{5, -2, 1, -2} // counts: 5, 3, 4, 2 (total 14)
h.NegativeBuckets = []int64{4, -1, 1, -1} // counts: 4, 3, 4, 4 (total 15)
chk, _, _, err = app.AppendHistogram(nil, ts, h, false)
chk, _, _, err = app.AppendHistogram(nil, 0, ts, h, false)
require.NoError(t, err)
require.Nil(t, chk)
hExp := h.Copy()
@ -135,7 +135,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) {
h.Sum = 24.4
h.PositiveBuckets = []int64{6, 1, -3, 6} // counts: 6, 7, 4, 10 (total 27)
h.NegativeBuckets = []int64{5, 1, -2, 3} // counts: 5, 6, 4, 7 (total 22)
chk, _, _, err = app.AppendHistogram(nil, ts, h, false)
chk, _, _, err = app.AppendHistogram(nil, 0, ts, h, false)
require.NoError(t, err)
require.Nil(t, chk)
hExp = h.Copy()
@ -235,7 +235,7 @@ func TestHistogramChunkBucketChanges(t *testing.T) {
NegativeBuckets: []int64{1},
}
chk, _, app, err := app.AppendHistogram(nil, ts1, h1, false)
chk, _, app, err := app.AppendHistogram(nil, 0, ts1, h1, false)
require.NoError(t, err)
require.Nil(t, chk)
require.Equal(t, 1, c.NumSamples())
@ -271,7 +271,7 @@ func TestHistogramChunkBucketChanges(t *testing.T) {
require.True(t, ok) // Only new buckets came in.
require.Equal(t, NotCounterReset, cr)
c, app = hApp.recode(posInterjections, negInterjections, h2.PositiveSpans, h2.NegativeSpans)
chk, _, _, err = app.AppendHistogram(nil, ts2, h2, false)
chk, _, _, err = app.AppendHistogram(nil, 0, ts2, h2, false)
require.NoError(t, err)
require.Nil(t, chk)
@ -344,7 +344,7 @@ func TestHistogramChunkAppendable(t *testing.T) {
ts := int64(1234567890)
chk, _, app, err := app.AppendHistogram(nil, ts, h.Copy(), false)
chk, _, app, err := app.AppendHistogram(nil, 0, ts, h.Copy(), false)
require.NoError(t, err)
require.Nil(t, chk)
require.Equal(t, 1, c.NumSamples())
@ -581,7 +581,7 @@ func TestHistogramChunkAppendable(t *testing.T) {
nextChunk := NewHistogramChunk()
app, err := nextChunk.Appender()
require.NoError(t, err)
newChunk, recoded, newApp, err := app.AppendHistogram(hApp, ts+1, h2, false)
newChunk, recoded, newApp, err := app.AppendHistogram(hApp, 0, ts+1, h2, false)
require.NoError(t, err)
require.Nil(t, newChunk)
require.False(t, recoded)
@ -599,7 +599,7 @@ func TestHistogramChunkAppendable(t *testing.T) {
nextChunk := NewHistogramChunk()
app, err := nextChunk.Appender()
require.NoError(t, err)
newChunk, recoded, newApp, err := app.AppendHistogram(hApp, ts+1, h2, false)
newChunk, recoded, newApp, err := app.AppendHistogram(hApp, 0, ts+1, h2, false)
require.NoError(t, err)
require.Nil(t, newChunk)
require.False(t, recoded)
@ -629,7 +629,7 @@ func TestHistogramChunkAppendable(t *testing.T) {
nextChunk := NewHistogramChunk()
app, err := nextChunk.Appender()
require.NoError(t, err)
newChunk, recoded, newApp, err := app.AppendHistogram(hApp, ts+1, h2, false)
newChunk, recoded, newApp, err := app.AppendHistogram(hApp, 0, ts+1, h2, false)
require.NoError(t, err)
require.Nil(t, newChunk)
require.False(t, recoded)
@ -776,7 +776,7 @@ func TestHistogramChunkAppendable(t *testing.T) {
func assertNewHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *HistogramAppender, ts int64, h *histogram.Histogram, expectHeader CounterResetHeader, expectHint histogram.CounterResetHint) {
oldChunkBytes := oldChunk.Bytes()
newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, ts, h, false)
newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, 0, ts, h, false)
require.Equal(t, oldChunkBytes, oldChunk.Bytes()) // Sanity check that previous chunk is untouched.
require.NoError(t, err)
require.NotNil(t, newChunk)
@ -791,7 +791,7 @@ func assertNewHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *Histogr
func assertNoNewHistogramChunkOnAppend(t *testing.T, currChunk Chunk, hApp *HistogramAppender, ts int64, h *histogram.Histogram, expectHeader CounterResetHeader) {
prevChunkBytes := currChunk.Bytes()
newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, ts, h, false)
newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, 0, ts, h, false)
require.Greater(t, len(currChunk.Bytes()), len(prevChunkBytes)) // Check that current chunk is bigger than previously.
require.NoError(t, err)
require.Nil(t, newChunk)
@ -804,7 +804,7 @@ func assertNoNewHistogramChunkOnAppend(t *testing.T, currChunk Chunk, hApp *Hist
func assertRecodedHistogramChunkOnAppend(t *testing.T, prevChunk Chunk, hApp *HistogramAppender, ts int64, h *histogram.Histogram, expectHeader CounterResetHeader) {
prevChunkBytes := prevChunk.Bytes()
newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, ts, h, false)
newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, 0, ts, h, false)
require.Equal(t, prevChunkBytes, prevChunk.Bytes()) // Sanity check that previous chunk is untouched. This may change in the future if we implement in-place recoding.
require.NoError(t, err)
require.NotNil(t, newChunk)
@ -1029,7 +1029,7 @@ func TestHistogramChunkAppendableWithEmptySpan(t *testing.T) {
require.NoError(t, err)
require.Equal(t, 0, c.NumSamples())
_, _, _, err = app.AppendHistogram(nil, 1, tc.h1, true)
_, _, _, err = app.AppendHistogram(nil, 0, 1, tc.h1, true)
require.NoError(t, err)
require.Equal(t, 1, c.NumSamples())
hApp, _ := app.(*HistogramAppender)
@ -1172,7 +1172,7 @@ func TestAtFloatHistogram(t *testing.T) {
app, err := chk.Appender()
require.NoError(t, err)
for i := range input {
newc, _, _, err := app.AppendHistogram(nil, int64(i), &input[i], false)
newc, _, _, err := app.AppendHistogram(nil, 0, int64(i), &input[i], false)
require.NoError(t, err)
require.Nil(t, newc)
}
@ -1230,7 +1230,7 @@ func TestHistogramChunkAppendableGauge(t *testing.T) {
ts := int64(1234567890)
chk, _, app, err := app.AppendHistogram(nil, ts, h.Copy(), false)
chk, _, app, err := app.AppendHistogram(nil, 0, ts, h.Copy(), false)
require.NoError(t, err)
require.Nil(t, chk)
require.Equal(t, 1, c.NumSamples())
@ -1471,7 +1471,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) {
h := tsdbutil.GenerateTestHistogram(0)
var isRecoded bool
c, isRecoded, app, err = app.AppendHistogram(nil, 1, h, true)
c, isRecoded, app, err = app.AppendHistogram(nil, 0, 1, h, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.NoError(t, err)
@ -1479,7 +1479,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) {
// Add erroring histogram.
h2 := h.Copy()
h2.Schema++
c, isRecoded, _, err = app.AppendHistogram(nil, 2, h2, true)
c, isRecoded, _, err = app.AppendHistogram(nil, 0, 2, h2, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.EqualError(t, err, "histogram schema change")
@ -1493,7 +1493,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) {
h := tsdbutil.GenerateTestHistogram(0)
var isRecoded bool
c, isRecoded, app, err = app.AppendHistogram(nil, 1, h, true)
c, isRecoded, app, err = app.AppendHistogram(nil, 0, 1, h, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.NoError(t, err)
@ -1501,7 +1501,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) {
// Add erroring histogram.
h2 := h.Copy()
h2.CounterResetHint = histogram.CounterReset
c, isRecoded, _, err = app.AppendHistogram(nil, 2, h2, true)
c, isRecoded, _, err = app.AppendHistogram(nil, 0, 2, h2, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.EqualError(t, err, "histogram counter reset")
@ -1515,7 +1515,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) {
h := tsdbutil.GenerateTestCustomBucketsHistogram(0)
var isRecoded bool
c, isRecoded, app, err = app.AppendHistogram(nil, 1, h, true)
c, isRecoded, app, err = app.AppendHistogram(nil, 0, 1, h, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.NoError(t, err)
@ -1523,7 +1523,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) {
// Add erroring histogram.
h2 := h.Copy()
h2.CustomValues = []float64{0, 1, 2, 3, 4, 5, 6, 7}
c, isRecoded, _, err = app.AppendHistogram(nil, 2, h2, true)
c, isRecoded, _, err = app.AppendHistogram(nil, 0, 2, h2, true)
require.Nil(t, c)
require.False(t, isRecoded)
require.EqualError(t, err, "histogram counter reset")
@ -1556,10 +1556,10 @@ func TestHistogramUniqueSpansAfterNextWithAtHistogram(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 0, h1, false)
_, _, _, err = app.AppendHistogram(nil, 0, 0, h1, false)
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 1, h2, false)
_, _, _, err = app.AppendHistogram(nil, 0, 1, h2, false)
require.NoError(t, err)
// Create an iterator and advance to the first histogram.
@ -1607,10 +1607,10 @@ func TestHistogramUniqueSpansAfterNextWithAtFloatHistogram(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 0, h1, false)
_, _, _, err = app.AppendHistogram(nil, 0, 0, h1, false)
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 1, h2, false)
_, _, _, err = app.AppendHistogram(nil, 0, 1, h2, false)
require.NoError(t, err)
// Create an iterator and advance to the first histogram.
@ -1653,10 +1653,10 @@ func TestHistogramCustomValuesInternedAfterNextWithAtHistogram(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 0, h1, false)
_, _, _, err = app.AppendHistogram(nil, 0, 0, h1, false)
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 1, h2, false)
_, _, _, err = app.AppendHistogram(nil, 0, 1, h2, false)
require.NoError(t, err)
// Create an iterator and advance to the first histogram.
@ -1699,10 +1699,10 @@ func TestHistogramCustomValuesInternedAfterNextWithAtFloatHistogram(t *testing.T
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 0, h1, false)
_, _, _, err = app.AppendHistogram(nil, 0, 0, h1, false)
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 1, h2, false)
_, _, _, err = app.AppendHistogram(nil, 0, 1, h2, false)
require.NoError(t, err)
// Create an iterator and advance to the first histogram.
@ -1754,7 +1754,7 @@ func BenchmarkAppendable(b *testing.B) {
b.Fatal(err)
}
_, _, _, err = app.AppendHistogram(nil, 1, h, true)
_, _, _, err = app.AppendHistogram(nil, 0, 1, h, true)
if err != nil {
b.Fatal(err)
}
@ -1791,7 +1791,7 @@ func TestIntHistogramEmptyBucketsWithGaps(t *testing.T) {
c := NewHistogramChunk()
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 1, h1, false)
_, _, _, err = app.AppendHistogram(nil, 0, 1, h1, false)
require.NoError(t, err)
h2 := &histogram.Histogram{
@ -1804,7 +1804,7 @@ func TestIntHistogramEmptyBucketsWithGaps(t *testing.T) {
}
require.NoError(t, h2.Validate())
newC, recoded, _, err := app.AppendHistogram(nil, 2, h2, false)
newC, recoded, _, err := app.AppendHistogram(nil, 0, 2, h2, false)
require.NoError(t, err)
require.True(t, recoded)
require.NotNil(t, newC)
@ -1839,7 +1839,7 @@ func TestHistogramIteratorFailIfSchemaInValid(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 1, h, false)
_, _, _, err = app.AppendHistogram(nil, 0, 1, h, false)
require.NoError(t, err)
it := c.Iterator(nil)
@ -1868,7 +1868,7 @@ func TestHistogramIteratorReduceSchema(t *testing.T) {
app, err := c.Appender()
require.NoError(t, err)
_, _, _, err = app.AppendHistogram(nil, 1, h, false)
_, _, _, err = app.AppendHistogram(nil, 0, 1, h, false)
require.NoError(t, err)
it := c.Iterator(nil)

View file

@ -158,7 +158,7 @@ type xorAppender struct {
trailing uint8
}
func (a *xorAppender) Append(t int64, v float64) {
func (a *xorAppender) Append(_, t int64, v float64) {
var tDelta uint64
num := binary.BigEndian.Uint16(a.b.bytes())
switch num {
@ -225,11 +225,11 @@ func (a *xorAppender) writeVDelta(v float64) {
xorWrite(a.b, v, a.v, &a.leading, &a.trailing)
}
func (*xorAppender) AppendHistogram(*HistogramAppender, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) {
func (*xorAppender) AppendHistogram(*HistogramAppender, int64, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) {
panic("appended a histogram sample to a float chunk")
}
func (*xorAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) {
func (*xorAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) {
panic("appended a float histogram sample to a float chunk")
}
@ -277,6 +277,10 @@ func (it *xorIterator) AtT() int64 {
return it.t
}
func (*xorIterator) AtST() int64 {
return 0
}
func (it *xorIterator) Err() error {
return it.err
}

View file

@ -24,7 +24,7 @@ func BenchmarkXorRead(b *testing.B) {
app, err := c.Appender()
require.NoError(b, err)
for i := int64(0); i < 120*1000; i += 1000 {
app.Append(i, float64(i)+float64(i)/10+float64(i)/100+float64(i)/1000)
app.Append(0, i, float64(i)+float64(i)/10+float64(i)/100+float64(i)/1000)
}
b.ReportAllocs()

View file

@ -135,6 +135,7 @@ type Meta struct {
}
// ChunkFromSamples requires all samples to have the same type.
// TODO(krajorama): test with ST when chunk formats support it.
func ChunkFromSamples(s []Sample) (Meta, error) {
return ChunkFromSamplesGeneric(SampleSlice(s))
}
@ -164,9 +165,9 @@ func ChunkFromSamplesGeneric(s Samples) (Meta, error) {
for i := 0; i < s.Len(); i++ {
switch sampleType {
case chunkenc.ValFloat:
ca.Append(s.Get(i).T(), s.Get(i).F())
ca.Append(s.Get(i).ST(), s.Get(i).T(), s.Get(i).F())
case chunkenc.ValHistogram:
newChunk, _, ca, err = ca.AppendHistogram(nil, s.Get(i).T(), s.Get(i).H(), false)
newChunk, _, ca, err = ca.AppendHistogram(nil, s.Get(i).ST(), s.Get(i).T(), s.Get(i).H(), false)
if err != nil {
return emptyChunk, err
}
@ -174,7 +175,7 @@ func ChunkFromSamplesGeneric(s Samples) (Meta, error) {
return emptyChunk, errors.New("did not expect to start a second chunk")
}
case chunkenc.ValFloatHistogram:
newChunk, _, ca, err = ca.AppendFloatHistogram(nil, s.Get(i).T(), s.Get(i).FH(), false)
newChunk, _, ca, err = ca.AppendFloatHistogram(nil, s.Get(i).ST(), s.Get(i).T(), s.Get(i).FH(), false)
if err != nil {
return emptyChunk, err
}

View file

@ -559,7 +559,7 @@ func randomChunk(t *testing.T) chunkenc.Chunk {
app, err := chunk.Appender()
require.NoError(t, err)
for range length {
app.Append(rand.Int63(), rand.Float64())
app.Append(0, rand.Int63(), rand.Float64())
}
return chunk
}

View file

@ -25,6 +25,7 @@ type Samples interface {
type Sample interface {
T() int64
ST() int64
F() float64
H() *histogram.Histogram
FH() *histogram.FloatHistogram
@ -38,16 +39,20 @@ func (s SampleSlice) Get(i int) Sample { return s[i] }
func (s SampleSlice) Len() int { return len(s) }
type sample struct {
t int64
f float64
h *histogram.Histogram
fh *histogram.FloatHistogram
st, t int64
f float64
h *histogram.Histogram
fh *histogram.FloatHistogram
}
func (s sample) T() int64 {
return s.t
}
func (s sample) ST() int64 {
return s.st
}
func (s sample) F() float64 {
return s.f
}
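// Illustrative same-package sketch, not part of this change: SampleSlice now
// surfaces start timestamps through the extended Sample interface. The helper
// name startTimestamps is an assumption for the example,
// e.g. startTimestamps(SampleSlice{sample{st: 10, t: 100, f: 1}}) == []int64{10}.
func startTimestamps(ss SampleSlice) []int64 {
	sts := make([]int64, 0, ss.Len())
	for i := 0; i < ss.Len(); i++ {
		sts = append(sts, ss.Get(i).ST()) // 0 when the producer did not set one
	}
	return sts
}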

View file

@ -372,7 +372,7 @@ func TestDeleteSimple_AppendV2(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@ -507,7 +507,7 @@ func TestSkippingInvalidValuesInSameTxn_AppendV2(t *testing.T) {
ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]chunks.Sample{
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}},
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}},
}, ssMap)
// Append Out of Order Value.
@ -524,7 +524,7 @@ func TestSkippingInvalidValuesInSameTxn_AppendV2(t *testing.T) {
ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]chunks.Sample{
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}},
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}, sample{0, 10, 3, nil, nil}},
}, ssMap)
}
@ -669,7 +669,7 @@ func TestDB_SnapshotWithDelete_AppendV2(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@ -772,7 +772,7 @@ func TestDB_e2e_AppendV2(t *testing.T) {
for range numDatapoints {
v := rand.Float64()
series = append(series, sample{ts, v, nil, nil})
series = append(series, sample{0, ts, v, nil, nil})
_, err := app.Append(0, lset, 0, ts, v, nil, nil, storage.AOptions{})
require.NoError(t, err)
@ -1094,7 +1094,7 @@ func TestTombstoneClean_AppendV2(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@ -2310,7 +2310,7 @@ func TestCompactHead_AppendV2(t *testing.T) {
val := rand.Float64()
_, err := app.Append(0, labels.FromStrings("a", "b"), 0, int64(i), val, nil, nil, storage.AOptions{})
require.NoError(t, err)
expSamples = append(expSamples, sample{int64(i), val, nil, nil})
expSamples = append(expSamples, sample{0, int64(i), val, nil, nil})
}
require.NoError(t, app.Commit())
@ -2337,7 +2337,7 @@ func TestCompactHead_AppendV2(t *testing.T) {
series = seriesSet.At().Iterator(series)
for series.Next() == chunkenc.ValFloat {
time, val := series.At()
actSamples = append(actSamples, sample{time, val, nil, nil})
actSamples = append(actSamples, sample{0, time, val, nil, nil})
}
require.NoError(t, series.Err())
}

View file

@ -546,7 +546,7 @@ func TestDeleteSimple(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@ -691,7 +691,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]chunks.Sample{
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}},
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}},
}, ssMap)
// Append Out of Order Value.
@ -708,7 +708,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]chunks.Sample{
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}},
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}, sample{0, 10, 3, nil, nil}},
}, ssMap)
}
@ -853,7 +853,7 @@ func TestDB_SnapshotWithDelete(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@ -956,7 +956,7 @@ func TestDB_e2e(t *testing.T) {
for range numDatapoints {
v := rand.Float64()
series = append(series, sample{ts, v, nil, nil})
series = append(series, sample{0, ts, v, nil, nil})
_, err := app.Append(0, lset, ts, v)
require.NoError(t, err)
@ -1278,7 +1278,7 @@ func TestTombstoneClean(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@ -2863,11 +2863,11 @@ func assureChunkFromSamples(t *testing.T, samples []chunks.Sample) chunks.Meta {
// TestChunkWriter_ReadAfterWrite ensures that chunk segments are cut at the set segment size and
// that the resulting segments include the expected chunk data.
func TestChunkWriter_ReadAfterWrite(t *testing.T) {
chk1 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil}})
chk2 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil}})
chk3 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil}})
chk4 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil}})
chk5 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil}})
chk1 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 1, nil, nil}})
chk2 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 2, nil, nil}})
chk3 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 3, nil, nil}})
chk4 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 4, nil, nil}})
chk5 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 5, nil, nil}})
chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size
tests := []struct {
@ -3069,11 +3069,11 @@ func TestRangeForTimestamp(t *testing.T) {
func TestChunkReader_ConcurrentReads(t *testing.T) {
t.Parallel()
chks := []chunks.Meta{
assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 1, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 2, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 3, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 4, nil, nil}}),
assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 5, nil, nil}}),
}
tempDir := t.TempDir()
@ -3133,7 +3133,7 @@ func TestCompactHead(t *testing.T) {
val := rand.Float64()
_, err := app.Append(0, labels.FromStrings("a", "b"), int64(i), val)
require.NoError(t, err)
expSamples = append(expSamples, sample{int64(i), val, nil, nil})
expSamples = append(expSamples, sample{0, int64(i), val, nil, nil})
}
require.NoError(t, app.Commit())
@ -3160,7 +3160,7 @@ func TestCompactHead(t *testing.T) {
series = seriesSet.At().Iterator(series)
for series.Next() == chunkenc.ValFloat {
time, val := series.At()
actSamples = append(actSamples, sample{time, val, nil, nil})
actSamples = append(actSamples, sample{0, time, val, nil, nil})
}
require.NoError(t, series.Err())
}

View file

@ -2101,17 +2101,20 @@ func (s *stripeSeries) postCreation(lset labels.Labels) {
}
type sample struct {
st int64
t int64
f float64
h *histogram.Histogram
fh *histogram.FloatHistogram
}
func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample {
return sample{t, v, h, fh}
func newSample(st, t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample {
return sample{st, t, v, h, fh}
}
func (s sample) T() int64 { return s.t }
func (s sample) T() int64 { return s.t }
func (s sample) ST() int64 { return s.st }
func (s sample) F() float64 { return s.f }
func (s sample) H() *histogram.Histogram { return s.h }
func (s sample) FH() *histogram.FloatHistogram { return s.fh }

View file

@ -1843,7 +1843,8 @@ func (s *memSeries) append(t int64, v float64, appendID uint64, o chunkOpts) (sa
if !sampleInOrder {
return sampleInOrder, chunkCreated
}
s.app.Append(t, v)
// TODO(krajorama): pass ST.
s.app.Append(0, t, v)
c.maxTime = t
@ -1885,7 +1886,8 @@ func (s *memSeries) appendHistogram(t int64, h *histogram.Histogram, appendID ui
prevApp = nil
}
newChunk, recoded, s.app, _ = s.app.AppendHistogram(prevApp, t, h, false) // false=request a new chunk if needed
// TODO(krajorama): pass ST.
newChunk, recoded, s.app, _ = s.app.AppendHistogram(prevApp, 0, t, h, false) // false=request a new chunk if needed
s.lastHistogramValue = h
s.lastFloatHistogramValue = nil
@ -1942,7 +1944,8 @@ func (s *memSeries) appendFloatHistogram(t int64, fh *histogram.FloatHistogram,
prevApp = nil
}
newChunk, recoded, s.app, _ = s.app.AppendFloatHistogram(prevApp, t, fh, false) // False means request a new chunk if needed.
// TODO(krajorama): pass ST.
newChunk, recoded, s.app, _ = s.app.AppendFloatHistogram(prevApp, 0, t, fh, false) // False means request a new chunk if needed.
s.lastHistogramValue = nil
s.lastFloatHistogramValue = fh

View file

@ -312,8 +312,8 @@ func TestHeadAppenderV2_WALMultiRef(t *testing.T) {
// The samples before the new ref should be discarded since Head truncation
// happens only after compacting the Head.
require.Equal(t, map[string][]chunks.Sample{`{foo="bar"}`: {
sample{1700, 3, nil, nil},
sample{2000, 4, nil, nil},
sample{0, 1700, 3, nil, nil},
sample{0, 2000, 4, nil, nil},
}}, series)
}
@ -605,7 +605,7 @@ func TestHeadAppenderV2_DeleteUntilCurrMax(t *testing.T) {
it = exps.Iterator(nil)
resSamples, err := storage.ExpandSamples(it, newSample)
require.NoError(t, err)
require.Equal(t, []chunks.Sample{sample{11, 1, nil, nil}}, resSamples)
require.Equal(t, []chunks.Sample{sample{0, 11, 1, nil, nil}}, resSamples)
for res.Next() {
}
require.NoError(t, res.Err())
@ -722,7 +722,7 @@ func TestHeadAppenderV2_Delete_e2e(t *testing.T) {
v := rand.Float64()
_, err := app.Append(0, ls, 0, ts, v, nil, nil, storage.AOptions{})
require.NoError(t, err)
series = append(series, sample{ts, v, nil, nil})
series = append(series, sample{0, ts, v, nil, nil})
ts += rand.Int63n(timeInterval) + 1
}
seriesMap[labels.New(l...).String()] = series
@ -1520,7 +1520,7 @@ func TestDataMissingOnQueryDuringCompaction_AppenderV2(t *testing.T) {
ref, err = app.Append(ref, labels.FromStrings("a", "b"), 0, ts, float64(i), nil, nil, storage.AOptions{})
require.NoError(t, err)
maxt = ts
expSamples = append(expSamples, sample{ts, float64(i), nil, nil})
expSamples = append(expSamples, sample{0, ts, float64(i), nil, nil})
}
require.NoError(t, app.Commit())
@ -2166,17 +2166,17 @@ func TestChunkSnapshot_AppenderV2(t *testing.T) {
aOpts.Exemplars = []exemplar.Exemplar{newExemplar(lbls, ts)}
}
val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil})
_, err := app.Append(0, lbls, 0, ts, val, nil, nil, aOpts)
require.NoError(t, err)
hist := histograms[int(ts)]
expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil})
expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil})
_, err = app.Append(0, lblsHist, 0, ts, 0, hist, nil, storage.AOptions{})
require.NoError(t, err)
floatHist := floatHistogram[int(ts)]
expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist})
expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist})
_, err = app.Append(0, lblsFloatHist, 0, ts, 0, nil, floatHist, storage.AOptions{})
require.NoError(t, err)
@ -2244,17 +2244,17 @@ func TestChunkSnapshot_AppenderV2(t *testing.T) {
aOpts.Exemplars = []exemplar.Exemplar{newExemplar(lbls, ts)}
}
val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil})
_, err := app.Append(0, lbls, 0, ts, val, nil, nil, aOpts)
require.NoError(t, err)
hist := histograms[int(ts)]
expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil})
expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil})
_, err = app.Append(0, lblsHist, 0, ts, 0, hist, nil, storage.AOptions{})
require.NoError(t, err)
floatHist := floatHistogram[int(ts)]
expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist})
expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist})
_, err = app.Append(0, lblsFloatHist, 0, ts, 0, nil, floatHist, storage.AOptions{})
require.NoError(t, err)

View file

@ -745,7 +745,7 @@ func TestHead_ReadWAL(t *testing.T) {
// Verify samples and exemplar for series 10.
c, _, _, err := s10.chunk(0, head.chunkDiskMapper, &head.memChunkPool)
require.NoError(t, err)
require.Equal(t, []sample{{100, 2, nil, nil}, {101, 5, nil, nil}}, expandChunk(c.chunk.Iterator(nil)))
require.Equal(t, []sample{{0, 100, 2, nil, nil}, {0, 101, 5, nil, nil}}, expandChunk(c.chunk.Iterator(nil)))
q, err := head.ExemplarQuerier(context.Background())
require.NoError(t, err)
@ -758,14 +758,14 @@ func TestHead_ReadWAL(t *testing.T) {
// Verify samples for series 50
c, _, _, err = s50.chunk(0, head.chunkDiskMapper, &head.memChunkPool)
require.NoError(t, err)
require.Equal(t, []sample{{101, 6, nil, nil}}, expandChunk(c.chunk.Iterator(nil)))
require.Equal(t, []sample{{0, 101, 6, nil, nil}}, expandChunk(c.chunk.Iterator(nil)))
// Verify records for series 100 and its duplicate, series 101.
// The samples before the new series record should be discarded since a duplicate record
// is only possible when old samples were compacted.
c, _, _, err = s100.chunk(0, head.chunkDiskMapper, &head.memChunkPool)
require.NoError(t, err)
require.Equal(t, []sample{{101, 7, nil, nil}}, expandChunk(c.chunk.Iterator(nil)))
require.Equal(t, []sample{{0, 101, 7, nil, nil}}, expandChunk(c.chunk.Iterator(nil)))
q, err = head.ExemplarQuerier(context.Background())
require.NoError(t, err)
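
expandChunk above walks a chunk iterator into a []sample, so with AtST on the iterator the zero start timestamps in these expectations fall out naturally. A sketch of what such a helper can look like for float-only chunks (the real test helper may differ):

func expandChunk(it chunkenc.Iterator) (out []sample) {
	for it.Next() == chunkenc.ValFloat {
		t, v := it.At()
		out = append(out, sample{st: it.AtST(), t: t, f: v})
	}
	return out
}
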
@ -841,8 +841,8 @@ func TestHead_WALMultiRef(t *testing.T) {
// The samples before the new ref should be discarded since Head truncation
// happens only after compacting the Head.
require.Equal(t, map[string][]chunks.Sample{`{foo="bar"}`: {
sample{1700, 3, nil, nil},
sample{2000, 4, nil, nil},
sample{0, 1700, 3, nil, nil},
sample{0, 2000, 4, nil, nil},
}}, series)
}
@ -1859,7 +1859,7 @@ func TestDeleteUntilCurMax(t *testing.T) {
it = exps.Iterator(nil)
resSamples, err := storage.ExpandSamples(it, newSample)
require.NoError(t, err)
require.Equal(t, []chunks.Sample{sample{11, 1, nil, nil}}, resSamples)
require.Equal(t, []chunks.Sample{sample{0, 11, 1, nil, nil}}, resSamples)
for res.Next() {
}
require.NoError(t, res.Err())
@ -1976,7 +1976,7 @@ func TestDelete_e2e(t *testing.T) {
v := rand.Float64()
_, err := app.Append(0, ls, ts, v)
require.NoError(t, err)
series = append(series, sample{ts, v, nil, nil})
series = append(series, sample{0, ts, v, nil, nil})
ts += rand.Int63n(timeInterval) + 1
}
seriesMap[labels.New(l...).String()] = series
@ -3838,7 +3838,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) {
ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i))
require.NoError(t, err)
maxt = ts
expSamples = append(expSamples, sample{ts, float64(i), nil, nil})
expSamples = append(expSamples, sample{0, ts, float64(i), nil, nil})
}
require.NoError(t, app.Commit())
@ -4503,17 +4503,17 @@ func TestChunkSnapshot(t *testing.T) {
// 240 samples should m-map at least 1 chunk.
for ts := int64(1); ts <= 240; ts++ {
val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil})
ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err)
hist := histograms[int(ts)]
expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil})
expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil})
_, err = app.AppendHistogram(0, lblsHist, ts, hist, nil)
require.NoError(t, err)
floatHist := floatHistogram[int(ts)]
expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist})
expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist})
_, err = app.AppendHistogram(0, lblsFloatHist, ts, nil, floatHist)
require.NoError(t, err)
@ -4577,17 +4577,17 @@ func TestChunkSnapshot(t *testing.T) {
// 240 samples should m-map at least 1 chunk.
for ts := int64(241); ts <= 480; ts++ {
val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil})
ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err)
hist := histograms[int(ts)]
expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil})
expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil})
_, err = app.AppendHistogram(0, lblsHist, ts, hist, nil)
require.NoError(t, err)
floatHist := floatHistogram[int(ts)]
expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist})
expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist})
_, err = app.AppendHistogram(0, lblsFloatHist, ts, nil, floatHist)
require.NoError(t, err)

View file

@ -40,7 +40,8 @@ func (o *OOOChunk) Insert(t int64, v float64, h *histogram.Histogram, fh *histog
// try to append at the end first if the new timestamp is higher than the
// last known timestamp.
if len(o.samples) == 0 || t > o.samples[len(o.samples)-1].t {
o.samples = append(o.samples, sample{t, v, h, fh})
// TODO(krajorama): pass ST.
o.samples = append(o.samples, sample{0, t, v, h, fh})
return true
}
@ -49,7 +50,8 @@ func (o *OOOChunk) Insert(t int64, v float64, h *histogram.Histogram, fh *histog
if i >= len(o.samples) {
// none found. append it at the end
o.samples = append(o.samples, sample{t, v, h, fh})
// TODO(krajorama): pass ST.
o.samples = append(o.samples, sample{0, t, v, h, fh})
return true
}
@ -61,7 +63,8 @@ func (o *OOOChunk) Insert(t int64, v float64, h *histogram.Histogram, fh *histog
// Expand length by 1 to make room. use a zero sample, we will overwrite it anyway.
o.samples = append(o.samples, sample{})
copy(o.samples[i+1:], o.samples[i:])
o.samples[i] = sample{t, v, h, fh}
// TODO(krajorama): pass ST.
o.samples[i] = sample{0, t, v, h, fh}
return true
}
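
The three hunks above cover Insert's fast path (strictly newer timestamp), the append-at-end miss, and the shift-and-place middle insert. Condensed into one hedged sketch: insertSorted is an illustrative name for the portion of the real Insert shown here, and the sort.Search predicate is an assumption.

// insertSorted places a sample into o.samples keeping timestamps ordered.
// It reports false if a sample with the same timestamp already exists.
func (o *OOOChunk) insertSorted(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) bool {
	// Find the first existing sample with a timestamp >= t (standard library sort package).
	i := sort.Search(len(o.samples), func(i int) bool { return o.samples[i].t >= t })
	if i >= len(o.samples) {
		// None found: append at the end. TODO(krajorama): pass ST.
		o.samples = append(o.samples, sample{0, t, v, h, fh})
		return true
	}
	if o.samples[i].t == t {
		return false // Refuse duplicate timestamps.
	}
	// Make room, shift the tail right, then drop the new sample into place.
	o.samples = append(o.samples, sample{})
	copy(o.samples[i+1:], o.samples[i:])
	o.samples[i] = sample{0, t, v, h, fh} // TODO(krajorama): pass ST.
	return true
}
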
@ -125,7 +128,8 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error
}
switch encoding {
case chunkenc.EncXOR:
app.Append(s.t, s.f)
// TODO(krajorama): pass ST.
app.Append(0, s.t, s.f)
case chunkenc.EncHistogram:
// Ignoring ok is ok, since we don't want to compare to the wrong previous appender anyway.
prevHApp, _ := prevApp.(*chunkenc.HistogramAppender)
@ -133,7 +137,8 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error
newChunk chunkenc.Chunk
recoded bool
)
newChunk, recoded, app, _ = app.AppendHistogram(prevHApp, s.t, s.h, false)
// TODO(krajorama): pass ST.
newChunk, recoded, app, _ = app.AppendHistogram(prevHApp, 0, s.t, s.h, false)
if newChunk != nil { // A new chunk was allocated.
if !recoded {
chks = append(chks, memChunk{chunk, cmint, cmaxt, nil})
@ -148,7 +153,8 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error
newChunk chunkenc.Chunk
recoded bool
)
newChunk, recoded, app, _ = app.AppendFloatHistogram(prevHApp, s.t, s.fh, false)
// TODO(krajorama): pass ST.
newChunk, recoded, app, _ = app.AppendFloatHistogram(prevHApp, 0, s.t, s.fh, false)
if newChunk != nil { // A new chunk was allocated.
if !recoded {
chks = append(chks, memChunk{chunk, cmint, cmaxt, nil})
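
ToEncodedChunks routes each sample by the encoding chosen for it. A condensed view of that dispatch with the placeholder ST, pulled together from the partially cut hunks above (the new-chunk/recoded bookkeeping that follows each histogram append is elided, and the fragment assumes the surrounding variables from the function):

switch encoding {
case chunkenc.EncXOR:
	app.Append(0, s.t, s.f) // TODO(krajorama): pass ST.
case chunkenc.EncHistogram:
	// Ignoring ok is fine: a wrong previous appender just means no counter-reset hint.
	prevHApp, _ := prevApp.(*chunkenc.HistogramAppender)
	newChunk, recoded, app, _ = app.AppendHistogram(prevHApp, 0, s.t, s.h, false)
case chunkenc.EncFloatHistogram:
	prevHApp, _ := prevApp.(*chunkenc.FloatHistogramAppender)
	newChunk, recoded, app, _ = app.AppendFloatHistogram(prevHApp, 0, s.t, s.fh, false)
}
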

View file

@ -788,6 +788,11 @@ func (p *populateWithDelSeriesIterator) AtT() int64 {
return p.curr.AtT()
}
// AtST TODO(krajorama): test AtST() when chunks support it.
func (p *populateWithDelSeriesIterator) AtST() int64 {
return p.curr.AtST()
}
func (p *populateWithDelSeriesIterator) Err() error {
if err := p.populateWithDelGenericSeriesIterator.Err(); err != nil {
return err
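
Every implementation of chunkenc.Iterator, including pass-through wrappers like this one, now needs AtST next to AtT. A partial, hedged sketch of the iterator interface as these changes use it (the real interface also has Seek and the histogram accessors):

type Iterator interface {
	// Next advances the iterator and reports the value type at the new position.
	Next() ValueType
	// At returns the current timestamp/value pair for float samples.
	At() (int64, float64)
	// AtT returns the timestamp of the current sample.
	AtT() int64
	// AtST returns the start timestamp of the current sample; implementations
	// that do not track it yet return 0.
	AtST() int64
	// Err returns any error hit while iterating.
	Err() error
}
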
@ -862,6 +867,7 @@ func (p *populateWithDelChunkSeriesIterator) Next() bool {
// populateCurrForSingleChunk sets the fields within p.currMetaWithChunk. This
// should be called if the samples in p.currDelIter only form one chunk.
// TODO(krajorama): test ST when chunks support it.
func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool {
valueType := p.currDelIter.Next()
if valueType == chunkenc.ValNone {
@ -877,7 +883,7 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool {
var (
newChunk chunkenc.Chunk
app chunkenc.Appender
t int64
st, t int64
err error
)
switch valueType {
@ -893,7 +899,8 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool {
}
var h *histogram.Histogram
t, h = p.currDelIter.AtHistogram(nil)
_, _, app, err = app.AppendHistogram(nil, t, h, true)
st = p.currDelIter.AtST()
_, _, app, err = app.AppendHistogram(nil, st, t, h, true)
if err != nil {
break
}
@ -910,7 +917,8 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool {
}
var v float64
t, v = p.currDelIter.At()
app.Append(t, v)
st = p.currDelIter.AtST()
app.Append(st, t, v)
}
case chunkenc.ValFloatHistogram:
newChunk = chunkenc.NewFloatHistogramChunk()
@ -924,7 +932,8 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool {
}
var h *histogram.FloatHistogram
t, h = p.currDelIter.AtFloatHistogram(nil)
_, _, app, err = app.AppendFloatHistogram(nil, t, h, true)
st = p.currDelIter.AtST()
_, _, app, err = app.AppendFloatHistogram(nil, st, t, h, true)
if err != nil {
break
}
@ -950,6 +959,7 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool {
// populateChunksFromIterable reads the samples from currDelIter to create
// chunks for chunksFromIterable. It also sets p.currMetaWithChunk to the first
// chunk.
// TODO(krajorama): test ST when chunks support it.
func (p *populateWithDelChunkSeriesIterator) populateChunksFromIterable() bool {
p.chunksFromIterable = p.chunksFromIterable[:0]
p.chunksFromIterableIdx = -1
@ -965,7 +975,7 @@ func (p *populateWithDelChunkSeriesIterator) populateChunksFromIterable() bool {
var (
// t is the timestamp for the current sample.
t int64
st, t int64
cmint int64
cmaxt int64
@ -1004,23 +1014,26 @@ func (p *populateWithDelChunkSeriesIterator) populateChunksFromIterable() bool {
{
var v float64
t, v = p.currDelIter.At()
app.Append(t, v)
st = p.currDelIter.AtST()
app.Append(st, t, v)
}
case chunkenc.ValHistogram:
{
var v *histogram.Histogram
t, v = p.currDelIter.AtHistogram(nil)
st = p.currDelIter.AtST()
// No need to set prevApp as AppendHistogram will set the
// counter reset header for the appender that's returned.
newChunk, recoded, app, err = app.AppendHistogram(nil, t, v, false)
newChunk, recoded, app, err = app.AppendHistogram(nil, st, t, v, false)
}
case chunkenc.ValFloatHistogram:
{
var v *histogram.FloatHistogram
t, v = p.currDelIter.AtFloatHistogram(nil)
st = p.currDelIter.AtST()
// No need to set prevApp as AppendHistogram will set the
// counter reset header for the appender that's returned.
newChunk, recoded, app, err = app.AppendFloatHistogram(nil, t, v, false)
newChunk, recoded, app, err = app.AppendFloatHistogram(nil, st, t, v, false)
}
}
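
Both rebuild paths above follow the same per-sample rhythm: advance the deletion-aware iterator, read the value for its type, read the start timestamp, and hand both timestamps to the appender. The float case, condensed into a hedged sketch (variable names as in the hunks; the check that rejects unexpected value types is elided):

for vt := p.currDelIter.Next(); vt != chunkenc.ValNone; vt = p.currDelIter.Next() {
	t, v := p.currDelIter.At()
	st := p.currDelIter.AtST()
	app.Append(st, t, v)
}
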
@ -1202,6 +1215,11 @@ func (it *DeletedIterator) AtT() int64 {
return it.Iter.AtT()
}
// AtST TODO(krajorama): test AtST() when chunks support it.
func (it *DeletedIterator) AtST() int64 {
return it.Iter.AtST()
}
func (it *DeletedIterator) Seek(t int64) chunkenc.ValueType {
if it.Iter.Err() != nil {
return chunkenc.ValNone

File diff suppressed because it is too large.