use float chunk format with ST instead of original

Signed-off-by: György Krajcsovits <gyorgy.krajcsovits@grafana.com>
György Krajcsovits 2026-01-22 18:19:47 +01:00
parent 5d0f59d8fe
commit fbfd3944e2
9 changed files with 48 additions and 27 deletions
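
Taken together, the commit swaps the default float chunk format from EncXOR to EncXOROptST (XOR with optional start timestamps, going by the ST fields and "start timestamp" assertions in the tests below) and updates every switch on the chunk encoding to accept both. The helper below is a hypothetical sketch of that recurring pattern, not code from this patch; it assumes only the chunkenc.EncXOR and chunkenc.EncXOROptST constants that appear in the hunks that follow.

package example

import "github.com/prometheus/prometheus/tsdb/chunkenc"

// isFloatChunk mirrors the edit repeated throughout this commit: code that
// previously matched only EncXOR must now treat EncXOROptST as a float chunk too.
func isFloatChunk(e chunkenc.Encoding) bool {
	switch e {
	case chunkenc.EncXOR, chunkenc.EncXOROptST:
		return true
	default:
		return false
	}
}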

@@ -912,7 +912,7 @@ func (DefaultBlockPopulator) PopulateBlock(ctx context.Context, metrics *Compact
 	switch chk.Chunk.Encoding() {
 	case chunkenc.EncHistogram, chunkenc.EncFloatHistogram:
 		meta.Stats.NumHistogramSamples += samples
-	case chunkenc.EncXOR:
+	case chunkenc.EncXOR, chunkenc.EncXOROptST:
 		meta.Stats.NumFloatSamples += samples
 	}
 }

@@ -2771,7 +2771,7 @@ func TestOOOWALWrite_AppendV2(t *testing.T) {
 		{Ref: 1, T: minutes(35), V: 35},
 	},
 	[]record.RefMmapMarker{ // 3rd sample, hence m-mapped.
-		{Ref: 1, MmapRef: 0x100000000 + 8},
+		{Ref: 1, MmapRef: 0x100000000 + 8 /* segment header size */},
 	},
 	[]record.RefSample{
 		{Ref: 1, T: minutes(36), V: 36},
@@ -2779,7 +2779,7 @@ func TestOOOWALWrite_AppendV2(t *testing.T) {
 	},
 	[]record.RefMmapMarker{ // 3rd sample, hence m-mapped.
-		{Ref: 1, MmapRef: 0x100000000 + 58},
+		{Ref: 1, MmapRef: 0x100000000 + 59 /* 8 segment header + 51 chunk, 51=50 byte chunk data + ST header */},
 	},
 	[]record.RefSample{ // Does not contain the in-order sample here.
 		{Ref: 1, T: minutes(50), V: 50},
@@ -2787,14 +2787,14 @@ func TestOOOWALWrite_AppendV2(t *testing.T) {
 	// Single commit but multiple OOO records.
 	[]record.RefMmapMarker{
-		{Ref: 2, MmapRef: 0x100000000 + 107},
+		{Ref: 2, MmapRef: 0x100000000 + 109},
 	},
 	[]record.RefSample{
 		{Ref: 2, T: minutes(50), V: 50},
 		{Ref: 2, T: minutes(51), V: 51},
 	},
 	[]record.RefMmapMarker{
-		{Ref: 2, MmapRef: 0x100000000 + 156},
+		{Ref: 2, MmapRef: 0x100000000 + 159},
 	},
 	[]record.RefSample{
 		{Ref: 2, T: minutes(52), V: 52},
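
The expected MmapRef values above shift because each m-mapped float chunk is now one byte larger (the ST header called out in the updated comments). As a rough decomposition aid, here is a hypothetical snippet assuming the conventional layout of these refs (file sequence in the upper 32 bits, byte offset in the lower 32 bits), which the diff itself does not spell out:

package main

import "fmt"

func main() {
	// 0x100000000 is 1<<32, i.e. file sequence 1 with the in-file offset in the low bits (assumption).
	for _, ref := range []uint64{0x100000000 + 8, 0x100000000 + 59, 0x100000000 + 109} {
		seq, off := ref>>32, ref&0xFFFFFFFF
		// offset 8:  right after the 8-byte segment header
		// offset 59: 8 (segment header) + 51 (first chunk: 50 bytes of chunk data plus the ST header, per the comment above)
		fmt.Printf("seq=%d offset=%d\n", seq, off)
	}
}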

@@ -4011,7 +4011,7 @@ func TestOOOWALWrite(t *testing.T) {
 		{Ref: 1, T: minutes(35), V: 35},
 	},
 	[]record.RefMmapMarker{ // 3rd sample, hence m-mapped.
-		{Ref: 1, MmapRef: 0x100000000 + 8},
+		{Ref: 1, MmapRef: 0x100000000 + 8 /* segment header size */},
 	},
 	[]record.RefSample{
 		{Ref: 1, T: minutes(36), V: 36},
@@ -4019,7 +4019,7 @@ func TestOOOWALWrite(t *testing.T) {
 	},
 	[]record.RefMmapMarker{ // 3rd sample, hence m-mapped.
-		{Ref: 1, MmapRef: 0x100000000 + 58},
+		{Ref: 1, MmapRef: 0x100000000 + 59 /* segment header size + 51 chunk, 51=50 byte chunk data + ST header */},
 	},
 	[]record.RefSample{ // Does not contain the in-order sample here.
 		{Ref: 1, T: minutes(50), V: 50},
@@ -4027,14 +4027,14 @@ func TestOOOWALWrite(t *testing.T) {
 	// Single commit but multiple OOO records.
 	[]record.RefMmapMarker{
-		{Ref: 2, MmapRef: 0x100000000 + 107},
+		{Ref: 2, MmapRef: 0x100000000 + 109},
 	},
 	[]record.RefSample{
 		{Ref: 2, T: minutes(50), V: 50},
 		{Ref: 2, T: minutes(51), V: 51},
 	},
 	[]record.RefMmapMarker{
-		{Ref: 2, MmapRef: 0x100000000 + 156},
+		{Ref: 2, MmapRef: 0x100000000 + 159},
 	},
 	[]record.RefSample{
 		{Ref: 2, T: minutes(52), V: 52},

@@ -1845,7 +1845,7 @@ type chunkOpts struct {
 // isolation for this append.)
 // Series lock must be held when calling.
 func (s *memSeries) append(t int64, v float64, appendID uint64, o chunkOpts) (sampleInOrder, chunkCreated bool) {
-	c, sampleInOrder, chunkCreated := s.appendPreprocessor(t, chunkenc.EncXOR, o)
+	c, sampleInOrder, chunkCreated := s.appendPreprocessor(t, chunkenc.EncXOROptST, o)
 	if !sampleInOrder {
 		return sampleInOrder, chunkCreated
 	}

@@ -3842,8 +3842,8 @@ func TestCuttingNewHeadChunks_AppenderV2(t *testing.T) {
 			numSamples int
 			numBytes int
 		}{
-			{numSamples: 120, numBytes: 46},
-			{numSamples: 60, numBytes: 32},
+			{numSamples: 120, numBytes: 47},
+			{numSamples: 60, numBytes: 33},
 		},
 	},
 	"large float samples": {
@@ -3859,8 +3859,8 @@ func TestCuttingNewHeadChunks_AppenderV2(t *testing.T) {
 			numSamples int
 			numBytes int
 		}{
-			{99, 1008},
-			{21, 219},
+			{99, 1009},
+			{21, 220},
 		},
 	},
 	"small histograms": {

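Each expected numBytes value above (and in the identical table for TestCuttingNewHeadChunks below) grows by exactly one byte per cut chunk, which lines up with the one-byte ST header the WAL tests above attribute to the new format; that attribution is taken from those comments, not stated in this hunk. A trivial check of the arithmetic:

package main

import "fmt"

func main() {
	before := []int{46, 32, 1008, 219} // expected numBytes with EncXOR head chunks
	after := []int{47, 33, 1009, 220}  // expected numBytes with EncXOROptST head chunks
	for i := range before {
		fmt.Println(after[i] - before[i]) // prints 1 for every chunk: one extra header byte each
	}
}
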
@@ -6202,8 +6202,8 @@ func TestCuttingNewHeadChunks(t *testing.T) {
 			numSamples int
 			numBytes int
 		}{
-			{numSamples: 120, numBytes: 46},
-			{numSamples: 60, numBytes: 32},
+			{numSamples: 120, numBytes: 47},
+			{numSamples: 60, numBytes: 33},
 		},
 	},
 	"large float samples": {
@@ -6219,8 +6219,8 @@ func TestCuttingNewHeadChunks(t *testing.T) {
 			numSamples int
 			numBytes int
 		}{
-			{99, 1008},
-			{21, 219},
+			{99, 1009},
+			{21, 220},
 		},
 	},
 	"small histograms": {

@@ -1161,7 +1161,8 @@ func (s *memSeries) encodeToSnapshotRecord(b []byte) []byte {
 	buf.PutUvarintBytes(s.headChunks.chunk.Bytes())
 	switch enc {
-	case chunkenc.EncXOR:
+	case chunkenc.EncXOR, chunkenc.EncXOROptST:
+		// TODO(krajorama): handle EncXOROptST properly once we start using it.
 		// Backwards compatibility for old sampleBuf which had last 4 samples.
 		for range 3 {
 			buf.PutBE64int64(0)
@@ -1214,7 +1215,8 @@ func decodeSeriesFromChunkSnapshot(d *record.Decoder, b []byte) (csr chunkSnapsh
 	csr.mc.chunk = chk
 	switch enc {
-	case chunkenc.EncXOR:
+	case chunkenc.EncXOR, chunkenc.EncXOROptST:
+		// TODO(krajorama): handle EncXOROptST properly once we start using it.
 		// Backwards-compatibility for old sampleBuf which had last 4 samples.
 		for range 3 {
 			_ = dec.Be64int64()

@@ -96,7 +96,7 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error
 		if s.t > maxt {
 			break
 		}
-		encoding := chunkenc.EncXOR
+		encoding := chunkenc.EncXOROptST
 		if s.h != nil {
 			encoding = chunkenc.EncHistogram
 		} else if s.fh != nil {
@@ -118,8 +118,10 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error
 			chunk = chunkenc.NewHistogramChunk()
 		case chunkenc.EncFloatHistogram:
 			chunk = chunkenc.NewFloatHistogramChunk()
+		case chunkenc.EncXOROptST:
+			chunk = chunkenc.NewXOROptSTChunk()
 		default:
-			chunk = chunkenc.NewXORChunk()
+			panic("unknown encoding, this should never happen")
 		}
 		app, err = chunk.Appender()
 		if err != nil {
@@ -127,7 +129,7 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error
 		}
 	}
 	switch encoding {
-	case chunkenc.EncXOR:
+	case chunkenc.EncXOR, chunkenc.EncXOROptST:
 		// TODO(krajorama): pass ST.
 		app.Append(0, s.t, s.f)
 	case chunkenc.EncHistogram:
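
Condensed into one place, the new float path of ToEncodedChunks reads roughly as below. This is a hypothetical sketch built only from the calls visible in this hunk (NewXOROptSTChunk, Appender, the three-argument Append), with the ST argument still hard-coded to 0 per the TODO:

package example

import "github.com/prometheus/prometheus/tsdb/chunkenc"

// sample is a stand-in for the tsdb sample type used above (t, f and st fields).
type sample struct {
	t, st int64
	f     float64
}

// encodeFloats appends out-of-order float samples to a single EncXOROptST chunk.
func encodeFloats(samples []sample) (chunkenc.Chunk, error) {
	c := chunkenc.NewXOROptSTChunk()
	app, err := c.Appender()
	if err != nil {
		return nil, err
	}
	for _, s := range samples {
		app.Append(0, s.t, s.f) // TODO: pass s.st once ST handling lands.
	}
	return c, nil
}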

@@ -184,7 +184,7 @@ func TestOOOChunks_ToEncodedChunks(t *testing.T) {
 		},
 		expectedCounterResets: []histogram.CounterResetHint{histogram.UnknownCounterReset, histogram.UnknownCounterReset},
 		expectedChunks: []chunkVerify{
-			{encoding: chunkenc.EncXOR, minTime: 1000, maxTime: 1100},
+			{encoding: chunkenc.EncXOROptST, minTime: 1000, maxTime: 1100},
 		},
 	},
 	"mix of floats and histograms": {
@@ -195,9 +195,9 @@ func TestOOOChunks_ToEncodedChunks(t *testing.T) {
 		},
 		expectedCounterResets: []histogram.CounterResetHint{histogram.UnknownCounterReset, histogram.UnknownCounterReset, histogram.UnknownCounterReset},
 		expectedChunks: []chunkVerify{
-			{encoding: chunkenc.EncXOR, minTime: 1000, maxTime: 1000},
+			{encoding: chunkenc.EncXOROptST, minTime: 1000, maxTime: 1000},
 			{encoding: chunkenc.EncHistogram, minTime: 1100, maxTime: 1100},
-			{encoding: chunkenc.EncXOR, minTime: 1200, maxTime: 1200},
+			{encoding: chunkenc.EncXOROptST, minTime: 1200, maxTime: 1200},
 		},
 	},
 	"has an implicit counter reset": {
@@ -241,6 +241,16 @@ func TestOOOChunks_ToEncodedChunks(t *testing.T) {
 			{encoding: chunkenc.EncHistogram, minTime: 0, maxTime: 1},
 		},
 	},
+	"float has ST": {
+		samples: []sample{
+			{t: 1000, f: 43.0, st: 900},
+			{t: 1100, f: 42.0, st: 1000},
+		},
+		expectedCounterResets: []histogram.CounterResetHint{histogram.UnknownCounterReset, histogram.UnknownCounterReset},
+		expectedChunks: []chunkVerify{
+			{encoding: chunkenc.EncXOROptST, minTime: 1000, maxTime: 1100},
+		},
+	},
 }
 for name, tc := range testCases {
@@ -278,12 +288,17 @@ func TestOOOChunks_ToEncodedChunks(t *testing.T) {
 				continue
 			}
 			switch c.chunk.Encoding() {
-			case chunkenc.EncXOR:
+			case chunkenc.EncXOR, chunkenc.EncXOROptST:
 				for j, s := range samples {
 					require.Equal(t, chunkenc.ValFloat, s.Type())
 					// XOR chunks don't have counter reset hints, so we shouldn't expect anything else than UnknownCounterReset.
 					require.Equal(t, histogram.UnknownCounterReset, tc.expectedCounterResets[sampleIndex+j], "sample reset hint %d", sampleIndex+j)
 					require.Equal(t, tc.samples[sampleIndex+j].f, s.F(), "sample %d", sampleIndex+j)
+					if c.chunk.Encoding() == chunkenc.EncXOROptST {
+						// TODO(krajorama): update ST once it's handled.
+						// require.Equal(t, tc.samples[sampleIndex+j].st, s.ST(), "sample %d start timestamp", sampleIndex+j)
+						require.Equal(t, int64(0), s.ST(), "sample %d start timestamp", sampleIndex+j)
+					}
 				}
 			case chunkenc.EncHistogram:
 				for j, s := range samples {
@@ -301,6 +316,8 @@ func TestOOOChunks_ToEncodedChunks(t *testing.T) {
 					compareTo.CounterResetHint = tc.expectedCounterResets[sampleIndex+j]
 					require.Equal(t, compareTo, s.FH().Compact(0), "sample %d", sampleIndex+j)
 				}
+			default:
+				t.Fatalf("unexpected chunk encoding %d", c.chunk.Encoding())
 			}
 			sampleIndex += len(samples)
 		}