2015-06-27 03:56:14 +00:00
|
|
|
package kafka_consumer
|
|
|
|
|
|
|
|
import (
|
|
|
|
"testing"
|
|
|
|
"time"
|
|
|
|
|
2016-01-23 00:45:31 +00:00
|
|
|
"github.com/influxdb/influxdb/models"
|
|
|
|
"github.com/influxdb/telegraf/testutil"
|
2015-11-16 20:12:45 +00:00
|
|
|
|
|
|
|
"github.com/Shopify/sarama"
|
2015-06-27 03:56:14 +00:00
|
|
|
"github.com/stretchr/testify/assert"
|
|
|
|
)
|
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
const (
	// testMsg is a valid InfluxDB line-protocol metric (measurement, tag,
	// field set, timestamp) that the parser should accept.
	testMsg = "cpu_load_short,host=server01 value=23422.0 1422568543702900257"

	// invalidMsg has no field set between the tags and the timestamp, so
	// the parser should reject it.
	invalidMsg = "cpu_load_short,host=server01 1422568543702900257"

	// pointBuffer bounds every channel used by the test consumer
	// (input messages, errors, and parsed points).
	pointBuffer = 5
)
|
2015-06-27 03:56:14 +00:00
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
func NewTestKafka() (*Kafka, chan *sarama.ConsumerMessage) {
|
|
|
|
in := make(chan *sarama.ConsumerMessage, pointBuffer)
|
|
|
|
k := Kafka{
|
|
|
|
ConsumerGroup: "test",
|
|
|
|
Topics: []string{"telegraf"},
|
|
|
|
ZookeeperPeers: []string{"localhost:2181"},
|
|
|
|
PointBuffer: pointBuffer,
|
|
|
|
Offset: "oldest",
|
|
|
|
in: in,
|
|
|
|
doNotCommitMsgs: true,
|
|
|
|
errs: make(chan *sarama.ConsumerError, pointBuffer),
|
|
|
|
done: make(chan struct{}),
|
|
|
|
pointChan: make(chan models.Point, pointBuffer),
|
2015-06-27 03:56:14 +00:00
|
|
|
}
|
2015-11-16 20:12:45 +00:00
|
|
|
return &k, in
|
|
|
|
}
|
2015-06-27 03:56:14 +00:00
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
// Test that the parser parses kafka messages into points
|
|
|
|
func TestRunParser(t *testing.T) {
|
|
|
|
k, in := NewTestKafka()
|
|
|
|
defer close(k.done)
|
2015-06-27 03:56:14 +00:00
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
go k.parser()
|
|
|
|
in <- saramaMsg(testMsg)
|
|
|
|
time.Sleep(time.Millisecond)
|
2015-06-27 03:56:14 +00:00
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
assert.Equal(t, len(k.pointChan), 1)
|
2015-06-27 03:56:14 +00:00
|
|
|
}
|
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
// Test that the parser ignores invalid messages
|
|
|
|
func TestRunParserInvalidMsg(t *testing.T) {
|
|
|
|
k, in := NewTestKafka()
|
|
|
|
defer close(k.done)
|
2015-06-27 03:56:14 +00:00
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
go k.parser()
|
|
|
|
in <- saramaMsg(invalidMsg)
|
|
|
|
time.Sleep(time.Millisecond)
|
2015-06-27 03:56:14 +00:00
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
assert.Equal(t, len(k.pointChan), 0)
|
2015-06-27 03:56:14 +00:00
|
|
|
}
|
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
// Test that points are dropped when we hit the buffer limit
|
|
|
|
func TestRunParserRespectsBuffer(t *testing.T) {
|
|
|
|
k, in := NewTestKafka()
|
|
|
|
defer close(k.done)
|
|
|
|
|
|
|
|
go k.parser()
|
|
|
|
for i := 0; i < pointBuffer+1; i++ {
|
|
|
|
in <- saramaMsg(testMsg)
|
2015-10-16 22:58:52 +00:00
|
|
|
}
|
2015-11-16 20:12:45 +00:00
|
|
|
time.Sleep(time.Millisecond)
|
|
|
|
|
|
|
|
assert.Equal(t, len(k.pointChan), 5)
|
2015-06-27 03:56:14 +00:00
|
|
|
}
|
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
// Test that the parser parses kafka messages into points
|
|
|
|
func TestRunParserAndGather(t *testing.T) {
|
|
|
|
k, in := NewTestKafka()
|
|
|
|
defer close(k.done)
|
|
|
|
|
|
|
|
go k.parser()
|
|
|
|
in <- saramaMsg(testMsg)
|
|
|
|
time.Sleep(time.Millisecond)
|
2015-06-27 03:56:14 +00:00
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
acc := testutil.Accumulator{}
|
|
|
|
k.Gather(&acc)
|
2015-06-27 03:56:14 +00:00
|
|
|
|
2015-11-16 20:12:45 +00:00
|
|
|
assert.Equal(t, len(acc.Points), 1)
|
2016-01-06 23:55:28 +00:00
|
|
|
acc.AssertContainsFields(t, "cpu_load_short",
|
|
|
|
map[string]interface{}{"value": float64(23422)})
|
2015-06-27 03:56:14 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func saramaMsg(val string) *sarama.ConsumerMessage {
|
|
|
|
return &sarama.ConsumerMessage{
|
|
|
|
Key: nil,
|
|
|
|
Value: []byte(val),
|
|
|
|
Offset: 0,
|
|
|
|
Partition: 0,
|
|
|
|
}
|
|
|
|
}
|