Auto-update head sample usage in tests

find . -name "*.go" -type f -exec sed -E -i \
's/([^[:alpha:]]sample\{)([^,{:]+,[^,]+,[^,]+,[^,]+\})/\10, \2/g' {} +
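
For illustration, here is the same substitution run on a single line taken from the diff below — a quick sketch with GNU sed, piped through echo instead of editing in place with -i:

echo 'expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})' \
  | sed -E 's/([^[:alpha:]]sample\{)([^,{:]+,[^,]+,[^,]+,[^,]+\})/\10, \2/g'
# prints: expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})

GNU sed reads \10 as backreference \1 followed by a literal 0, so the replacement prepends a zero value as the new first field. The pattern only matches four-field sample{...} literals whose first field contains no ':', so it skips literals written with field names and leaves already-updated five-field literals untouched.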

I've omitted tsdb/ooo_head.go from this commit because I'm also adding a TODO
there.

Signed-off-by: György Krajcsovits <gyorgy.krajcsovits@grafana.com>
György Krajcsovits 2026-01-14 09:55:45 +01:00
parent f616689f09
commit 28dca34f4f
6 changed files with 417 additions and 417 deletions


@@ -176,7 +176,7 @@ func TestCorruptedChunk(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
tmpdir := t.TempDir()
-series := storage.NewListSeries(labels.FromStrings("a", "b"), []chunks.Sample{sample{1, 1, nil, nil}})
+series := storage.NewListSeries(labels.FromStrings("a", "b"), []chunks.Sample{sample{0, 1, 1, nil, nil}})
blockDir := createBlock(t, tmpdir, []storage.Series{series})
files, err := sequenceFiles(chunkDir(blockDir))
require.NoError(t, err)
@@ -236,7 +236,7 @@ func TestLabelValuesWithMatchers(t *testing.T) {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"unique", fmt.Sprintf("value%d", i),
-), []chunks.Sample{sample{100, 0, nil, nil}}))
+), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
blockDir := createBlock(t, tmpdir, seriesEntries)
@@ -319,7 +319,7 @@ func TestBlockQuerierReturnsSortedLabelValues(t *testing.T) {
for i := 100; i > 0; i-- {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"__name__", fmt.Sprintf("value%d", i),
-), []chunks.Sample{sample{100, 0, nil, nil}}))
+), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
blockDir := createBlock(t, tmpdir, seriesEntries)
@@ -436,7 +436,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) {
"a_unique", fmt.Sprintf("value%d", i),
"b_tens", fmt.Sprintf("value%d", i/(metricCount/10)),
"c_ninety", fmt.Sprintf("value%d", i/(metricCount/10)/9), // "0" for the first 90%, then "1"
-), []chunks.Sample{sample{100, 0, nil, nil}}))
+), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
blockDir := createBlock(b, tmpdir, seriesEntries)
@@ -472,13 +472,13 @@ func TestLabelNamesWithMatchers(t *testing.T) {
for i := range 100 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"unique", fmt.Sprintf("value%d", i),
-), []chunks.Sample{sample{100, 0, nil, nil}}))
+), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
if i%10 == 0 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"unique", fmt.Sprintf("value%d", i),
-), []chunks.Sample{sample{100, 0, nil, nil}}))
+), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
if i%20 == 0 {
@@ -486,7 +486,7 @@ func TestLabelNamesWithMatchers(t *testing.T) {
"tens", fmt.Sprintf("value%d", i/10),
"twenties", fmt.Sprintf("value%d", i/20),
"unique", fmt.Sprintf("value%d", i),
-), []chunks.Sample{sample{100, 0, nil, nil}}))
+), []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
}
@@ -542,7 +542,7 @@ func TestBlockIndexReader_PostingsForLabelMatching(t *testing.T) {
testPostingsForLabelMatching(t, 2, func(t *testing.T, series []labels.Labels) IndexReader {
var seriesEntries []storage.Series
for _, s := range series {
-seriesEntries = append(seriesEntries, storage.NewListSeries(s, []chunks.Sample{sample{100, 0, nil, nil}}))
+seriesEntries = append(seriesEntries, storage.NewListSeries(s, []chunks.Sample{sample{0, 100, 0, nil, nil}}))
}
blockDir := createBlock(t, t.TempDir(), seriesEntries)


@@ -372,7 +372,7 @@ func TestDeleteSimple_AppendV2(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
-expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
+expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@@ -507,7 +507,7 @@ func TestSkippingInvalidValuesInSameTxn_AppendV2(t *testing.T) {
ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]chunks.Sample{
-labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}},
+labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}},
}, ssMap)
// Append Out of Order Value.
@@ -524,7 +524,7 @@ func TestSkippingInvalidValuesInSameTxn_AppendV2(t *testing.T) {
ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]chunks.Sample{
-labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}},
+labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}, sample{0, 10, 3, nil, nil}},
}, ssMap)
}
@@ -669,7 +669,7 @@ func TestDB_SnapshotWithDelete_AppendV2(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
-expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
+expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@@ -772,7 +772,7 @@ func TestDB_e2e_AppendV2(t *testing.T) {
for range numDatapoints {
v := rand.Float64()
-series = append(series, sample{ts, v, nil, nil})
+series = append(series, sample{0, ts, v, nil, nil})
_, err := app.Append(0, lset, 0, ts, v, nil, nil, storage.AOptions{})
require.NoError(t, err)
@@ -1094,7 +1094,7 @@ func TestTombstoneClean_AppendV2(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
-expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
+expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@@ -2310,7 +2310,7 @@ func TestCompactHead_AppendV2(t *testing.T) {
val := rand.Float64()
_, err := app.Append(0, labels.FromStrings("a", "b"), 0, int64(i), val, nil, nil, storage.AOptions{})
require.NoError(t, err)
-expSamples = append(expSamples, sample{int64(i), val, nil, nil})
+expSamples = append(expSamples, sample{0, int64(i), val, nil, nil})
}
require.NoError(t, app.Commit())
@@ -2337,7 +2337,7 @@ func TestCompactHead_AppendV2(t *testing.T) {
series = seriesSet.At().Iterator(series)
for series.Next() == chunkenc.ValFloat {
time, val := series.At()
-actSamples = append(actSamples, sample{time, val, nil, nil})
+actSamples = append(actSamples, sample{0, time, val, nil, nil})
}
require.NoError(t, series.Err())
}


@@ -546,7 +546,7 @@ func TestDeleteSimple(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
-expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
+expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@@ -691,7 +691,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]chunks.Sample{
-labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}},
+labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}},
}, ssMap)
// Append Out of Order Value.
@@ -708,7 +708,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]chunks.Sample{
-labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}},
+labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}, sample{0, 10, 3, nil, nil}},
}, ssMap)
}
@@ -853,7 +853,7 @@ func TestDB_SnapshotWithDelete(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
-expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
+expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@@ -956,7 +956,7 @@ func TestDB_e2e(t *testing.T) {
for range numDatapoints {
v := rand.Float64()
-series = append(series, sample{ts, v, nil, nil})
+series = append(series, sample{0, ts, v, nil, nil})
_, err := app.Append(0, lset, ts, v)
require.NoError(t, err)
@@ -1278,7 +1278,7 @@ func TestTombstoneClean(t *testing.T) {
expSamples := make([]chunks.Sample, 0, len(c.remaint))
for _, ts := range c.remaint {
-expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
+expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil})
}
expss := newMockSeriesSet([]storage.Series{
@@ -2863,11 +2863,11 @@ func assureChunkFromSamples(t *testing.T, samples []chunks.Sample) chunks.Meta {
// TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and
// that the resulted segments includes the expected chunks data.
func TestChunkWriter_ReadAfterWrite(t *testing.T) {
-chk1 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil}})
-chk2 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil}})
-chk3 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil}})
-chk4 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil}})
-chk5 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil}})
+chk1 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 1, nil, nil}})
+chk2 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 2, nil, nil}})
+chk3 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 3, nil, nil}})
+chk4 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 4, nil, nil}})
+chk5 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 5, nil, nil}})
chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size
tests := []struct {
@@ -3069,11 +3069,11 @@ func TestRangeForTimestamp(t *testing.T) {
func TestChunkReader_ConcurrentReads(t *testing.T) {
t.Parallel()
chks := []chunks.Meta{
-assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil}}),
-assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil}}),
-assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil}}),
-assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil}}),
-assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil}}),
+assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 1, nil, nil}}),
+assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 2, nil, nil}}),
+assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 3, nil, nil}}),
+assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 4, nil, nil}}),
+assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 5, nil, nil}}),
}
tempDir := t.TempDir()
@@ -3133,7 +3133,7 @@ func TestCompactHead(t *testing.T) {
val := rand.Float64()
_, err := app.Append(0, labels.FromStrings("a", "b"), int64(i), val)
require.NoError(t, err)
-expSamples = append(expSamples, sample{int64(i), val, nil, nil})
+expSamples = append(expSamples, sample{0, int64(i), val, nil, nil})
}
require.NoError(t, app.Commit())
@@ -3160,7 +3160,7 @@ func TestCompactHead(t *testing.T) {
series = seriesSet.At().Iterator(series)
for series.Next() == chunkenc.ValFloat {
time, val := series.At()
-actSamples = append(actSamples, sample{time, val, nil, nil})
+actSamples = append(actSamples, sample{0, time, val, nil, nil})
}
require.NoError(t, series.Err())
}


@@ -312,8 +312,8 @@ func TestHeadAppenderV2_WALMultiRef(t *testing.T) {
// The samples before the new ref should be discarded since Head truncation
// happens only after compacting the Head.
require.Equal(t, map[string][]chunks.Sample{`{foo="bar"}`: {
-sample{1700, 3, nil, nil},
-sample{2000, 4, nil, nil},
+sample{0, 1700, 3, nil, nil},
+sample{0, 2000, 4, nil, nil},
}}, series)
}
@@ -605,7 +605,7 @@ func TestHeadAppenderV2_DeleteUntilCurrMax(t *testing.T) {
it = exps.Iterator(nil)
resSamples, err := storage.ExpandSamples(it, newSample)
require.NoError(t, err)
-require.Equal(t, []chunks.Sample{sample{11, 1, nil, nil}}, resSamples)
+require.Equal(t, []chunks.Sample{sample{0, 11, 1, nil, nil}}, resSamples)
for res.Next() {
}
require.NoError(t, res.Err())
@@ -722,7 +722,7 @@ func TestHeadAppenderV2_Delete_e2e(t *testing.T) {
v := rand.Float64()
_, err := app.Append(0, ls, 0, ts, v, nil, nil, storage.AOptions{})
require.NoError(t, err)
-series = append(series, sample{ts, v, nil, nil})
+series = append(series, sample{0, ts, v, nil, nil})
ts += rand.Int63n(timeInterval) + 1
}
seriesMap[labels.New(l...).String()] = series
@@ -1520,7 +1520,7 @@ func TestDataMissingOnQueryDuringCompaction_AppenderV2(t *testing.T) {
ref, err = app.Append(ref, labels.FromStrings("a", "b"), 0, ts, float64(i), nil, nil, storage.AOptions{})
require.NoError(t, err)
maxt = ts
-expSamples = append(expSamples, sample{ts, float64(i), nil, nil})
+expSamples = append(expSamples, sample{0, ts, float64(i), nil, nil})
}
require.NoError(t, app.Commit())
@@ -2166,17 +2166,17 @@ func TestChunkSnapshot_AppenderV2(t *testing.T) {
aOpts.Exemplars = []exemplar.Exemplar{newExemplar(lbls, ts)}
}
val := rand.Float64()
-expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
+expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil})
_, err := app.Append(0, lbls, 0, ts, val, nil, nil, aOpts)
require.NoError(t, err)
hist := histograms[int(ts)]
-expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil})
+expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil})
_, err = app.Append(0, lblsHist, 0, ts, 0, hist, nil, storage.AOptions{})
require.NoError(t, err)
floatHist := floatHistogram[int(ts)]
-expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist})
+expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist})
_, err = app.Append(0, lblsFloatHist, 0, ts, 0, nil, floatHist, storage.AOptions{})
require.NoError(t, err)
@@ -2244,17 +2244,17 @@ func TestChunkSnapshot_AppenderV2(t *testing.T) {
aOpts.Exemplars = []exemplar.Exemplar{newExemplar(lbls, ts)}
}
val := rand.Float64()
-expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
+expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil})
_, err := app.Append(0, lbls, 0, ts, val, nil, nil, aOpts)
require.NoError(t, err)
hist := histograms[int(ts)]
-expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil})
+expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil})
_, err = app.Append(0, lblsHist, 0, ts, 0, hist, nil, storage.AOptions{})
require.NoError(t, err)
floatHist := floatHistogram[int(ts)]
-expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist})
+expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist})
_, err = app.Append(0, lblsFloatHist, 0, ts, 0, nil, floatHist, storage.AOptions{})
require.NoError(t, err)


@@ -841,8 +841,8 @@ func TestHead_WALMultiRef(t *testing.T) {
// The samples before the new ref should be discarded since Head truncation
// happens only after compacting the Head.
require.Equal(t, map[string][]chunks.Sample{`{foo="bar"}`: {
-sample{1700, 3, nil, nil},
-sample{2000, 4, nil, nil},
+sample{0, 1700, 3, nil, nil},
+sample{0, 2000, 4, nil, nil},
}}, series)
}
@@ -1859,7 +1859,7 @@ func TestDeleteUntilCurMax(t *testing.T) {
it = exps.Iterator(nil)
resSamples, err := storage.ExpandSamples(it, newSample)
require.NoError(t, err)
-require.Equal(t, []chunks.Sample{sample{11, 1, nil, nil}}, resSamples)
+require.Equal(t, []chunks.Sample{sample{0, 11, 1, nil, nil}}, resSamples)
for res.Next() {
}
require.NoError(t, res.Err())
@@ -1976,7 +1976,7 @@ func TestDelete_e2e(t *testing.T) {
v := rand.Float64()
_, err := app.Append(0, ls, ts, v)
require.NoError(t, err)
-series = append(series, sample{ts, v, nil, nil})
+series = append(series, sample{0, ts, v, nil, nil})
ts += rand.Int63n(timeInterval) + 1
}
seriesMap[labels.New(l...).String()] = series
@@ -3838,7 +3838,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) {
ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i))
require.NoError(t, err)
maxt = ts
-expSamples = append(expSamples, sample{ts, float64(i), nil, nil})
+expSamples = append(expSamples, sample{0, ts, float64(i), nil, nil})
}
require.NoError(t, app.Commit())
@@ -4503,17 +4503,17 @@ func TestChunkSnapshot(t *testing.T) {
// 240 samples should m-map at least 1 chunk.
for ts := int64(1); ts <= 240; ts++ {
val := rand.Float64()
-expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
+expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil})
ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err)
hist := histograms[int(ts)]
-expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil})
+expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil})
_, err = app.AppendHistogram(0, lblsHist, ts, hist, nil)
require.NoError(t, err)
floatHist := floatHistogram[int(ts)]
-expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist})
+expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist})
_, err = app.AppendHistogram(0, lblsFloatHist, ts, nil, floatHist)
require.NoError(t, err)
@@ -4577,17 +4577,17 @@ func TestChunkSnapshot(t *testing.T) {
// 240 samples should m-map at least 1 chunk.
for ts := int64(241); ts <= 480; ts++ {
val := rand.Float64()
-expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
+expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil})
ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err)
hist := histograms[int(ts)]
-expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil})
+expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil})
_, err = app.AppendHistogram(0, lblsHist, ts, hist, nil)
require.NoError(t, err)
floatHist := floatHistogram[int(ts)]
-expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist})
+expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist})
_, err = app.AppendHistogram(0, lblsFloatHist, ts, nil, floatHist)
require.NoError(t, err)

File diff suppressed because it is too large.