Skip to content
This repository has been archived by the owner on Aug 13, 2019. It is now read-only.

Commit

Permalink
Add tests for analyzeBlock CLI command
Browse files Browse the repository at this point in the history
  • Loading branch information
obitech committed Aug 9, 2019
1 parent 54bfe94 commit eba0f8a
Show file tree
Hide file tree
Showing 3 changed files with 71 additions and 31 deletions.
6 changes: 3 additions & 3 deletions cmd/tsdb/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,16 +32,18 @@ import (

"github.com/go-kit/kit/log"
"github.com/pkg/errors"
"gopkg.in/alecthomas/kingpin.v2"

"github.com/prometheus/tsdb"
"github.com/prometheus/tsdb/chunks"
tsdb_errors "github.com/prometheus/tsdb/errors"
"github.com/prometheus/tsdb/labels"
"gopkg.in/alecthomas/kingpin.v2"
)

const (
	// printBlocksTableHeader is the tab-separated header row emitted
	// above the per-block listing (rendered via text/tabwriter).
	printBlocksTableHeader = "BLOCK ULID\tMIN TIME\tMAX TIME\tNUM SAMPLES\tNUM CHUNKS\tNUM SERIES"
	// defaultAnalyzeLimit is the default entry limit for the analyze
	// command; kept as a string because kingpin flag defaults are strings.
	defaultAnalyzeLimit = "20"
	// timeDelta is the spacing between successive synthetic samples in
	// the write benchmark — presumably milliseconds; confirm against
	// ingestScrapes before relying on the unit.
	timeDelta = 30000
)

func main() {
Expand Down Expand Up @@ -250,8 +252,6 @@ func (b *writeBenchmark) run() error {
return nil
}

const timeDelta = 30000

func (b *writeBenchmark) ingestScrapes(lbls []labels.Labels, scrapeCount int) (uint64, error) {
var mu sync.Mutex
var total uint64
Expand Down
46 changes: 43 additions & 3 deletions cmd/tsdb/main_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import (
"strings"
"testing"
"text/tabwriter"
"time"

"github.com/prometheus/tsdb"
testutildb "github.com/prometheus/tsdb/testutil/db"
Expand Down Expand Up @@ -112,7 +113,7 @@ func TestPrintBlocks(t *testing.T) {
expected = strings.Replace(expected, "\n", "", -1)

if expected != actual {
t.Errorf("expected (%#v) != actual (%#v)", expected, actual)
t.Errorf("expected (%#v) != actual (%#v)", b.String(), actualStdout.String())
}
}

Expand Down Expand Up @@ -150,7 +151,7 @@ func TestExtractBlock(t *testing.T) {
analyzeBlockID = "foo"
block, err = extractBlock(blocks, &analyzeBlockID)
if err == nil {
t.Errorf("Analyzing block %q should throw error", analyzeBlockID)
t.Errorf("Analyzing block ID %q should throw error", analyzeBlockID)
}
if block != nil {
t.Error("block should be nil")
Expand Down Expand Up @@ -180,9 +181,48 @@ func TestAnalyzeBlocks(t *testing.T) {
t.Error(err)
}

var actual bytes.Buffer
var (
expected bytes.Buffer
actual bytes.Buffer
)

// Actual output.
err = analyzeBlock(&actual, block, dal)
if err != nil {
t.Error(err)
}

act := actual.String()
act = strings.Replace(act, " ", "", -1)
act = strings.Replace(act, "\t", "", -1)
act = strings.Replace(act, "\n", "", -1)

// Expected output.
meta := block.Meta()
fmt.Fprintf(&expected, "Block ID: %s\n", meta.ULID)
fmt.Fprintf(&expected, "Duration: %s\n", (time.Duration(meta.MaxTime-meta.MinTime) * 1e6).String())
fmt.Fprintf(&expected, "Series: %d\n", 1)
fmt.Fprintf(&expected, "Label names: %d\n", 1)
fmt.Fprintf(&expected, "Postings (unique label pairs): %d\n", 1)
fmt.Fprintf(&expected, "Postings entries (total label pairs): %d\n", 1)
fmt.Fprintf(&expected, "\nLabel pairs most involved in churning:\n")
fmt.Fprintf(&expected, "1 %s=0", testutildb.DefaultLabelName)
fmt.Fprintf(&expected, "\nLabel names most involved in churning:\n")
fmt.Fprintf(&expected, "1 %s", testutildb.DefaultLabelName)
fmt.Fprintf(&expected, "\nMost common label pairs:\n")
fmt.Fprintf(&expected, "1 %s=0", testutildb.DefaultLabelName)
fmt.Fprintf(&expected, "\nLabel names with highest cumulative label value length:\n")
fmt.Fprintf(&expected, "1 %s", testutildb.DefaultLabelName)
fmt.Fprintf(&expected, "\nHighest cardinality labels:\n")
fmt.Fprintf(&expected, "1 %s", testutildb.DefaultLabelName)
fmt.Fprintf(&expected, "\nHighest cardinality metric names:\n")

exp := expected.String()
exp = strings.Replace(exp, " ", "", -1)
exp = strings.Replace(exp, "\t", "", -1)
exp = strings.Replace(exp, "\n", "", -1)

if exp != act {
t.Errorf("expected (%#v) != actual (%#v)", expected.String(), actual.String())
}
}
50 changes: 25 additions & 25 deletions testutil/db/db.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,29 @@ import (
"github.com/prometheus/tsdb/tsdbutil"
)

const (
	// DefaultLabelName is the label name that GenSeries assigns to every
	// generated series (its value is the series index). Exported so tests
	// can reconstruct expected output.
	DefaultLabelName = "labelName"
	// defaultLabelValue is the value prefix used for the additional
	// filler labels produced by GenSeries.
	defaultLabelValue = "labelValue"
)

// sample is a single (timestamp, value) measurement; its T and V
// accessors make it usable where a tsdbutil.Sample-style pair is
// expected — NOTE(review): confirm it is stored as tsdbutil.Sample.
type sample struct {
	t int64   // timestamp — presumably milliseconds; verify against callers
	v float64 // sample value
}

// mockSeries backs the tsdb.Series values built by newSeries,
// delegating both accessors to caller-supplied closures.
type mockSeries struct {
	labels   func() labels.Labels       // returns the series' label set
	iterator func() tsdb.SeriesIterator // returns a fresh sample iterator
}

// T returns the sample's timestamp.
func (s sample) T() int64 { return s.t }

// V returns the sample's value.
func (s sample) V() float64 { return s.v }

// CreateBlock creates a block with given set of series and returns its dir.
func CreateBlock(tb testing.TB, dir string, series []tsdb.Series) string {
head := createHead(tb, series)
Expand Down Expand Up @@ -72,24 +95,6 @@ func createHead(tb testing.TB, series []tsdb.Series) *tsdb.Head {
return head
}

const (
defaultLabelName = "labelName"
defaultLabelValue = "labelValue"
)

type sample struct {
t int64
v float64
}

func (s sample) T() int64 {
return s.t
}

func (s sample) V() float64 {
return s.v
}

// GenSeries generates series with a given number of labels and values.
func GenSeries(totalSeries, labelCount int, mint, maxt int64) []tsdb.Series {
if totalSeries == 0 || labelCount == 0 {
Expand All @@ -100,9 +105,9 @@ func GenSeries(totalSeries, labelCount int, mint, maxt int64) []tsdb.Series {

for i := 0; i < totalSeries; i++ {
lbls := make(map[string]string, labelCount)
lbls[defaultLabelName] = strconv.Itoa(i)
lbls[DefaultLabelName] = strconv.Itoa(i)
for j := 1; len(lbls) < labelCount; j++ {
lbls[defaultLabelName+strconv.Itoa(j)] = defaultLabelValue + strconv.Itoa(j)
lbls[DefaultLabelName+strconv.Itoa(j)] = defaultLabelValue + strconv.Itoa(j)
}
samples := make([]tsdbutil.Sample, 0, maxt-mint+1)
for t := mint; t < maxt; t++ {
Expand All @@ -113,11 +118,6 @@ func GenSeries(totalSeries, labelCount int, mint, maxt int64) []tsdb.Series {
return series
}

type mockSeries struct {
labels func() labels.Labels
iterator func() tsdb.SeriesIterator
}

func newSeries(l map[string]string, s []tsdbutil.Sample) tsdb.Series {
return &mockSeries{
labels: func() labels.Labels { return labels.FromMap(l) },
Expand Down

0 comments on commit eba0f8a

Please sign in to comment.