Skip to content
Merged
1 change: 1 addition & 0 deletions Gopkg.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pkg/chunk/cache/cache_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import (

"github.com/cortexproject/cortex/pkg/chunk"
"github.com/cortexproject/cortex/pkg/chunk/cache"
prom_chunk "github.com/cortexproject/cortex/pkg/prom1/storage/local/chunk"
prom_chunk "github.com/cortexproject/cortex/pkg/chunk/encoding"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
)
Expand Down
17 changes: 13 additions & 4 deletions pkg/chunk/chunk.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import (
"strings"
"sync"

prom_chunk "github.com/cortexproject/cortex/pkg/prom1/storage/local/chunk"
prom_chunk "github.com/cortexproject/cortex/pkg/chunk/encoding"
"github.com/cortexproject/cortex/pkg/prom1/storage/metric"
"github.com/golang/snappy"
jsoniter "github.com/json-iterator/go"
Expand All @@ -28,6 +28,7 @@ const (
ErrInvalidChecksum = errs.Error("invalid chunk checksum")
ErrWrongMetadata = errs.Error("wrong chunk metadata")
ErrMetadataLength = errs.Error("chunk metadata wrong length")
ErrDataLength = errs.Error("chunk data wrong length")
)

var castagnoliTable = crc32.MakeTable(crc32.Castagnoli)
Expand Down Expand Up @@ -213,12 +214,12 @@ func (c *Chunk) Encode() ([]byte, error) {

// Write the metadata length back at the start of the buffer.
// (note this length includes the 4 bytes for the length itself)
binary.BigEndian.PutUint32(metadataLenBytes[:], uint32(buf.Len()))
metadataLen := buf.Len()
binary.BigEndian.PutUint32(metadataLenBytes[:], uint32(metadataLen))
copy(buf.Bytes(), metadataLenBytes[:])

// Write the data length
// Write another 4 empty bytes - we will come back and put the len in here.
dataLenBytes := [4]byte{}
binary.BigEndian.PutUint32(dataLenBytes[:], uint32(prom_chunk.ChunkLen))
if _, err := buf.Write(dataLenBytes[:]); err != nil {
return nil, err
}
Expand All @@ -228,6 +229,10 @@ func (c *Chunk) Encode() ([]byte, error) {
return nil, err
}

// Now write the data len back into the buf.
binary.BigEndian.PutUint32(dataLenBytes[:], uint32(buf.Len()-metadataLen-4))
copy(buf.Bytes()[metadataLen:], dataLenBytes[:])

// Now work out the checksum
c.encoded = buf.Bytes()
c.ChecksumSet = true
Expand Down Expand Up @@ -314,6 +319,10 @@ func (c *Chunk) Decode(decodeContext *DecodeContext, input []byte) error {

c.encoded = input
remainingData := input[len(input)-r.Len():]
if int(dataLen) != len(remainingData) {
return ErrDataLength
}

return c.Data.UnmarshalFromBuf(remainingData[:int(dataLen)])
}

Expand Down
6 changes: 3 additions & 3 deletions pkg/chunk/chunk_store_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ import (
"golang.org/x/net/context"

"github.com/cortexproject/cortex/pkg/chunk/cache"
"github.com/cortexproject/cortex/pkg/prom1/storage/local/chunk"
"github.com/cortexproject/cortex/pkg/chunk/encoding"
"github.com/cortexproject/cortex/pkg/util"
"github.com/cortexproject/cortex/pkg/util/extract"
"github.com/cortexproject/cortex/pkg/util/validation"
Expand Down Expand Up @@ -415,7 +415,7 @@ func TestChunkStoreRandom(t *testing.T) {
const chunkLen = 2 * 3600 // in seconds
for i := 0; i < 100; i++ {
ts := model.TimeFromUnix(int64(i * chunkLen))
chunks, _ := chunk.New().Add(model.SamplePair{
chunks, _ := encoding.New().Add(model.SamplePair{
Timestamp: ts,
Value: model.SampleValue(float64(i)),
})
Expand Down Expand Up @@ -479,7 +479,7 @@ func TestChunkStoreLeastRead(t *testing.T) {
const chunkLen = 60 // in seconds
for i := 0; i < 24; i++ {
ts := model.TimeFromUnix(int64(i * chunkLen))
chunks, _ := chunk.New().Add(model.SamplePair{
chunks, _ := encoding.New().Add(model.SamplePair{
Timestamp: ts,
Value: model.SampleValue(float64(i)),
})
Expand Down
20 changes: 16 additions & 4 deletions pkg/chunk/chunk_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import (
"testing"
"time"

"github.com/cortexproject/cortex/pkg/prom1/storage/local/chunk"
"github.com/cortexproject/cortex/pkg/chunk/encoding"
"github.com/cortexproject/cortex/pkg/util"
"github.com/pkg/errors"
"github.com/prometheus/common/model"
Expand All @@ -24,13 +24,21 @@ func dummyChunk(now model.Time) Chunk {
})
}

func dummyChunkFor(now model.Time, metric model.Metric) Chunk {
cs, _ := chunk.New().Add(model.SamplePair{Timestamp: now, Value: 0})
func dummyChunkForEncoding(now model.Time, metric model.Metric, enc encoding.Encoding, samples int) Chunk {
c, _ := encoding.NewForEncoding(enc)
for i := 0; i < samples; i++ {
t := time.Duration(i) * 15 * time.Second
cs, err := c.Add(model.SamplePair{Timestamp: now.Add(t), Value: 0})
if err != nil {
panic(err)
}
c = cs[0]
}
chunk := NewChunk(
userID,
metric.Fingerprint(),
metric,
cs[0],
c,
now.Add(-time.Hour),
now,
)
Expand All @@ -42,6 +50,10 @@ func dummyChunkFor(now model.Time, metric model.Metric) Chunk {
return chunk
}

// dummyChunkFor builds a test Chunk for metric containing a single
// zero-valued sample at time now, using the Varbit encoding.
// It is a convenience wrapper around dummyChunkForEncoding.
func dummyChunkFor(now model.Time, metric model.Metric) Chunk {
	return dummyChunkForEncoding(now, metric, encoding.Varbit, 1)
}

func TestChunkCodec(t *testing.T) {
dummy := dummyChunk(model.Now())
decodeContext := NewDecodeContext()
Expand Down
Loading