Merge tag 'refs/tags/v2.47.1'
v2.47.1

# -----BEGIN PGP SIGNATURE-----
#
# iQIzBAABCgAdFiEEkVyweI5uUj6sXzxfTN2OBP/Ad0YFAmUdPkEACgkQTN2OBP/A
# d0bfmA/+NFgq0X0SxjOBoo+AFpq2Sj/CKF8k4FxCnUbg4SMLDLXWAULyykJJePC/
# 2IhKJh8xH70DR3lRaoWV4tXQDX9mT4AvZgonAIoQIgND6NT0Z+FURGbCLGn1emrY
# H4jLuLpnQx5pCO0CjspvlAlFI9OSVK80K594dNX2ZGCeakP/ikKmQyAgyL6Pf0ii
# ojxxGmfdBgFWqziLioyzfEvPy64OlKMawd4GQlVW0vzs+aund2a9P+o9gD2Jc9Fl
# KcE1epJ8Y/2joTDOauM6cWvujd7xop8Q0/75/8A5xs7s1pHCayNid9PhldkYenL8
# 7mjcpr7q4HPSjYM4kl4PdbTTx2i5CX0z0ktlOc246RDOlz4P703bvqzZerdQhoer
# WN38HYN9VcKteQy+bhBocaM5e3NABCXf7Rdqh8aUDWGzEHDJgJ+1SHcnjq44+scq
# pgp3djqiWx3PYYSd68KZyQ/UuXrPFpbJEsmSEDnF1G+tqpRWnidRzydXHgESOpfe
# 5AffWwPrDXv6/Q1Q1xmeJ8IgLLGCutsgsLvZrUazUbC0i2Dd68Ci77YYNglfGdPq
# uKWBDRRSW02AGNaRUnJHHiSHV+2lt5I4WQsn2mxe14WhBNOKkdivpq4ItQdAXaiH
# qx50RLR4rg4s3OzGBR83ZzvQxgWYG4J8ghd8XseLxmkaw9SdGew=
# =cBq4
# -----END PGP SIGNATURE-----
# gpg: directory '/home/runner/.gnupg' created
# gpg: keybox '/home/runner/.gnupg/pubring.kbx' created
# gpg: Signature made Wed Oct  4 10:28:17 2023 UTC
# gpg:                using RSA key 915CB0788E6E523EAC5F3C5F4CDD8E04FFC07746
# gpg: Can't check signature: No public key
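(The missing-key warning above is expected on a fresh CI runner: its newly created GnuPG keyring contains no public keys, so gpg can only report that the tag was signed with RSA key 915CB0788E6E523EAC5F3C5F4CDD8E04FFC07746, not verify the signature. Verifying it would require first importing the corresponding Prometheus release-signing public key into the keyring.)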
github-actions[bot] committed Oct 9, 2023
2 parents f8f9fca + c4d1a8b commit a415b16
Showing 4 changed files with 57 additions and 4 deletions.
CHANGELOG.md (4 changes: 4 additions & 0 deletions)
@@ -1,5 +1,9 @@
 # Changelog
 
+## 2.47.1 / 2023-10-04
+
+* [BUGFIX] Fix duplicate sample detection at chunk size limit #12874
+
 ## 2.47.0 / 2023-09-06
 
 This release adds an experimental OpenTelemetry (OTLP) Ingestion feature,
VERSION (2 changes: 1 addition & 1 deletion)
@@ -1 +1 @@
-2.47.0
+2.47.1
tsdb/head_append.go (9 changes: 6 additions & 3 deletions)
@@ -1282,16 +1282,19 @@ func (s *memSeries) appendPreprocessor(t int64, e chunkenc.Encoding, o chunkOpts
 		// There is no head chunk in this series yet, create the first chunk for the sample.
 		c = s.cutNewHeadChunk(t, e, o.chunkRange)
 		chunkCreated = true
-	} else if len(c.chunk.Bytes()) > maxBytesPerXORChunk {
-		c = s.cutNewHeadChunk(t, e, o.chunkRange)
-		chunkCreated = true
 	}
 
 	// Out of order sample.
 	if c.maxTime >= t {
 		return c, false, chunkCreated
 	}
 
+	// Check the chunk size, unless we just created it and if the chunk is too large, cut a new one.
+	if !chunkCreated && len(c.chunk.Bytes()) > maxBytesPerXORChunk {
+		c = s.cutNewHeadChunk(t, e, o.chunkRange)
+		chunkCreated = true
+	}
+
 	if c.chunk.Encoding() != e {
 		// The chunk encoding expected by this append is different than the head chunk's
 		// encoding. So we cut a new chunk with the expected encoding.
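The substance of the fix is a reordering. Before this change, appendPreprocessor cut a new head chunk as soon as the size limit was exceeded, and only then ran the c.maxTime >= t duplicate/out-of-order check. A freshly cut chunk has no samples, so its maxTime no longer reflects the last appended timestamp, and a duplicate sample arriving exactly at the size boundary slipped past the check. Moving the size check after the maxTime check (and skipping it for just-created chunks) closes that window. The following is a minimal, self-contained sketch of the ordering issue; headChunk, cutNew, maxSize, and the two append functions are hypothetical stand-ins for the tsdb internals, not Prometheus code:

package main

import "fmt"

// headChunk is a toy stand-in for the tsdb head chunk. It tracks the largest
// appended timestamp (used for duplicate/out-of-order detection) and a sample
// count standing in for the encoded byte size.
type headChunk struct {
	maxTime int64
	size    int
}

const maxSize = 4 // tiny "size" limit so the boundary case triggers often

// cutNew models cutNewHeadChunk: the fresh chunk is empty, so its maxTime no
// longer remembers the last timestamp written to the previous chunk.
func cutNew() *headChunk { return &headChunk{maxTime: -1} }

// appendBuggy mirrors the pre-fix ordering: the size check runs before the
// duplicate check, so a duplicate arriving exactly when the chunk is full is
// compared against the fresh chunk's reset maxTime and wrongly accepted.
func appendBuggy(c *headChunk, t int64) (*headChunk, bool) {
	if c.size > maxSize {
		c = cutNew() // resets maxTime, losing the dedup reference point
	}
	if c.maxTime >= t {
		return c, false // duplicate or out-of-order: reject
	}
	c.maxTime, c.size = t, c.size+1
	return c, true
}

// appendFixed mirrors the post-fix ordering: duplicates are rejected against
// the old chunk's maxTime first; only in-order samples can cut a new chunk.
func appendFixed(c *headChunk, t int64) (*headChunk, bool) {
	if c.maxTime >= t {
		return c, false
	}
	if c.size > maxSize {
		c = cutNew()
	}
	c.maxTime, c.size = t, c.size+1
	return c, true
}

func main() {
	impls := []struct {
		name string
		fn   func(*headChunk, int64) (*headChunk, bool)
	}{{"buggy", appendBuggy}, {"fixed", appendFixed}}

	for _, impl := range impls {
		c, stored := cutNew(), 0
		for i := int64(0); i < 20; i++ {
			// Send every timestamp twice, as the regression test below does.
			var ok bool
			if c, ok = impl.fn(c, i/2); ok {
				stored++
			}
		}
		fmt.Printf("%s: stored %d of 20 samples (want 10)\n", impl.name, stored)
	}
}

With this toy limit, the buggy ordering stores 12 of 20 samples because one duplicate slips in at every chunk boundary, while the fixed ordering stores exactly the 10 unique samples.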
tsdb/head_test.go (46 changes: 46 additions & 0 deletions)
@@ -5399,3 +5399,49 @@ func TestCuttingNewHeadChunks(t *testing.T) {
 		})
 	}
 }
+
+// TestHeadDetectsDuplicateSampleAtSizeLimit tests a regression where a duplicate sample
+// is appended to the head, right when the head chunk is at the size limit.
+// The test adds all samples as duplicates, thus expecting that the result has
+// exactly half of the samples.
+func TestHeadDetectsDuplicateSampleAtSizeLimit(t *testing.T) {
+	numSamples := 1000
+	baseTS := int64(1695209650)
+
+	h, _ := newTestHead(t, DefaultBlockDuration, wlog.CompressionNone, false)
+	defer func() {
+		require.NoError(t, h.Close())
+	}()
+
+	a := h.Appender(context.Background())
+	var err error
+	vals := []float64{math.MaxFloat64, 0x00} // Use the worst case scenario for the XOR encoding. Otherwise we hit the sample limit before the size limit.
+	for i := 0; i < numSamples; i++ {
+		ts := baseTS + int64(i/2)*10000
+		a.Append(0, labels.FromStrings("foo", "bar"), ts, vals[(i/2)%len(vals)])
+		err = a.Commit()
+		require.NoError(t, err)
+		a = h.Appender(context.Background())
+	}
+
+	indexReader, err := h.Index()
+	require.NoError(t, err)
+
+	var (
+		chunks  []chunks.Meta
+		builder labels.ScratchBuilder
+	)
+	require.NoError(t, indexReader.Series(1, &builder, &chunks))
+
+	chunkReader, err := h.Chunks()
+	require.NoError(t, err)
+
+	storedSampleCount := 0
+	for _, chunkMeta := range chunks {
+		chunk, err := chunkReader.Chunk(chunkMeta)
+		require.NoError(t, err)
+		storedSampleCount += chunk.NumSamples()
+	}
+
+	require.Equal(t, numSamples/2, storedSampleCount)
+}
