Flush based on buffer size rather than time

This includes:
- Add Accumulator to the Start() function of service inputs
- For message consumer plugins, use the Accumulator to constantly add
  metrics and make Gather a dummy function
- Rework unit tests to match this new behavior
- Make "flush_buffer_when_full" a config option that defaults to true

closes #666
Author: Cameron Sparr
Date:   2016-02-15 17:21:38 -07:00
commit ee468be696
parent 7f539c951a
15 changed files with 271 additions and 285 deletions
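
To make the new contract concrete, here is a minimal sketch of the pattern the commit message describes. It is an illustration, not code from this commit: the consumer type, its fields, and the channel feeding it are hypothetical, and only the shape (the Accumulator handed to Start(), a background receive loop, Gather() as a dummy function) follows the change. The telegraf.Accumulator and telegraf.Metric interfaces are real; everything else is made up for illustration.

// A minimal sketch of the service-input pattern, assuming a
// hypothetical consumer type; not code from this commit.
package sketch

import "github.com/influxdata/telegraf"

type consumer struct {
	acc  telegraf.Accumulator // stored once in Start(), used by the receive loop
	in   chan telegraf.Metric // hypothetical source of parsed metrics
	done chan struct{}
}

// Start receives the Accumulator and kicks off the receive loop.
func (c *consumer) Start(acc telegraf.Accumulator) error {
	c.acc = acc
	go c.receiver()
	return nil
}

// receiver constantly adds metrics as they arrive, instead of
// buffering them for the next Gather interval.
func (c *consumer) receiver() {
	for {
		select {
		case <-c.done:
			return
		case m := <-c.in:
			c.acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time())
		}
	}
}

// Gather is a dummy function: metrics already flow through the
// Accumulator, so there is nothing to collect on the interval.
func (c *consumer) Gather(_ telegraf.Accumulator) error { return nil }

func (c *consumer) Stop() { close(c.done) }

Because metrics now reach the agent continuously rather than once per interval, the output buffer can fill between timed flushes; the "flush_buffer_when_full" option (default true) flushes on buffer size instead of waiting for the timer.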

plugins/inputs/kafka_consumer/kafka_consumer_test.go

@@ -4,7 +4,6 @@ import (
 	"testing"
 	"time"
 
-	"github.com/influxdata/telegraf"
 	"github.com/influxdata/telegraf/plugins/parsers"
 	"github.com/influxdata/telegraf/testutil"
 
@@ -17,29 +16,28 @@ const (
 	testMsgGraphite = "cpu.load.short.graphite 23422 1454780029"
 	testMsgJSON     = "{\"a\": 5, \"b\": {\"c\": 6}}\n"
 	invalidMsg      = "cpu_load_short,host=server01 1422568543702900257"
-	pointBuffer     = 5
 )
 
-func NewTestKafka() (*Kafka, chan *sarama.ConsumerMessage) {
-	in := make(chan *sarama.ConsumerMessage, pointBuffer)
+func newTestKafka() (*Kafka, chan *sarama.ConsumerMessage) {
+	in := make(chan *sarama.ConsumerMessage, 1000)
 	k := Kafka{
 		ConsumerGroup:   "test",
 		Topics:          []string{"telegraf"},
 		ZookeeperPeers:  []string{"localhost:2181"},
-		PointBuffer:     pointBuffer,
 		Offset:          "oldest",
 		in:              in,
 		doNotCommitMsgs: true,
-		errs:            make(chan *sarama.ConsumerError, pointBuffer),
+		errs:            make(chan *sarama.ConsumerError, 1000),
 		done:            make(chan struct{}),
-		metricC:         make(chan telegraf.Metric, pointBuffer),
 	}
 	return &k, in
 }
 
 // Test that the parser parses kafka messages into points
 func TestRunParser(t *testing.T) {
-	k, in := NewTestKafka()
+	k, in := newTestKafka()
+	acc := testutil.Accumulator{}
+	k.acc = &acc
 	defer close(k.done)
 
 	k.parser, _ = parsers.NewInfluxParser()
@@ -47,12 +45,14 @@ func TestRunParser(t *testing.T) {
 	in <- saramaMsg(testMsg)
 	time.Sleep(time.Millisecond)
 
-	assert.Equal(t, len(k.metricC), 1)
+	assert.Equal(t, acc.NFields(), 1)
 }
 
 // Test that the parser ignores invalid messages
 func TestRunParserInvalidMsg(t *testing.T) {
-	k, in := NewTestKafka()
+	k, in := newTestKafka()
+	acc := testutil.Accumulator{}
+	k.acc = &acc
 	defer close(k.done)
 
 	k.parser, _ = parsers.NewInfluxParser()
@@ -60,27 +60,14 @@ func TestRunParserInvalidMsg(t *testing.T) {
 	in <- saramaMsg(invalidMsg)
 	time.Sleep(time.Millisecond)
 
-	assert.Equal(t, len(k.metricC), 0)
-}
-
-// Test that points are dropped when we hit the buffer limit
-func TestRunParserRespectsBuffer(t *testing.T) {
-	k, in := NewTestKafka()
-	defer close(k.done)
-
-	k.parser, _ = parsers.NewInfluxParser()
-	go k.receiver()
-	for i := 0; i < pointBuffer+1; i++ {
-		in <- saramaMsg(testMsg)
-	}
-	time.Sleep(time.Millisecond)
-
-	assert.Equal(t, len(k.metricC), 5)
+	assert.Equal(t, acc.NFields(), 0)
 }
 
 // Test that the parser parses kafka messages into points
 func TestRunParserAndGather(t *testing.T) {
-	k, in := NewTestKafka()
+	k, in := newTestKafka()
+	acc := testutil.Accumulator{}
+	k.acc = &acc
 	defer close(k.done)
 
 	k.parser, _ = parsers.NewInfluxParser()
@@ -88,17 +75,18 @@ func TestRunParserAndGather(t *testing.T) {
 	in <- saramaMsg(testMsg)
 	time.Sleep(time.Millisecond)
 
-	acc := testutil.Accumulator{}
 	k.Gather(&acc)
 
-	assert.Equal(t, len(acc.Metrics), 1)
+	assert.Equal(t, acc.NFields(), 1)
 	acc.AssertContainsFields(t, "cpu_load_short",
 		map[string]interface{}{"value": float64(23422)})
 }
 
 // Test that the parser parses kafka messages into points
 func TestRunParserAndGatherGraphite(t *testing.T) {
-	k, in := NewTestKafka()
+	k, in := newTestKafka()
+	acc := testutil.Accumulator{}
+	k.acc = &acc
 	defer close(k.done)
 
 	k.parser, _ = parsers.NewGraphiteParser("_", []string{}, nil)
@@ -106,17 +94,18 @@ func TestRunParserAndGatherGraphite(t *testing.T) {
 	in <- saramaMsg(testMsgGraphite)
 	time.Sleep(time.Millisecond)
 
-	acc := testutil.Accumulator{}
 	k.Gather(&acc)
 
-	assert.Equal(t, len(acc.Metrics), 1)
+	assert.Equal(t, acc.NFields(), 1)
 	acc.AssertContainsFields(t, "cpu_load_short_graphite",
 		map[string]interface{}{"value": float64(23422)})
 }
 
 // Test that the parser parses kafka messages into points
 func TestRunParserAndGatherJSON(t *testing.T) {
-	k, in := NewTestKafka()
+	k, in := newTestKafka()
+	acc := testutil.Accumulator{}
+	k.acc = &acc
 	defer close(k.done)
 
 	k.parser, _ = parsers.NewJSONParser("kafka_json_test", []string{}, nil)
@@ -124,10 +113,9 @@ func TestRunParserAndGatherJSON(t *testing.T) {
 	in <- saramaMsg(testMsgJSON)
 	time.Sleep(time.Millisecond)
 
-	acc := testutil.Accumulator{}
 	k.Gather(&acc)
 
-	assert.Equal(t, len(acc.Metrics), 1)
+	assert.Equal(t, acc.NFields(), 2)
 	acc.AssertContainsFields(t, "kafka_json_test",
 		map[string]interface{}{
 			"a": float64(5),