Remove outputs blocking inputs when output is slow (#4938)

Daniel Nelson
2018-11-05 13:34:28 -08:00
committed by GitHub
parent 74667cd681
commit 6e5c2f8bb6
59 changed files with 3615 additions and 2189 deletions
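The diff below wires the kafka_consumer input to this new model: the plugin no longer blocks on the output, but tracks each message it hands off (the messages map keyed by telegraf.TrackingID) and caps how many may be awaiting delivery via MaxUndeliveredMessages. A minimal, self-contained sketch of that back-pressure idea, assuming nothing beyond the standard library (maxUndelivered and the delivery goroutine are illustrative stand-ins, not the commit's code):

package main

import (
	"fmt"
	"sync"
)

func main() {
	// Stand-in for MaxUndeliveredMessages: a buffered channel used as a
	// semaphore, so the reader blocks once maxUndelivered messages are
	// in flight instead of the output ever blocking the input for good.
	const maxUndelivered = 3
	sem := make(chan struct{}, maxUndelivered)
	var wg sync.WaitGroup

	for msg := 1; msg <= 10; msg++ {
		sem <- struct{}{} // acquire a slot; blocks once the cap is reached
		wg.Add(1)
		go func(id int) {
			defer wg.Done()
			// Stand-in for the output delivering the metric; on delivery
			// the real plugin would mark the Kafka offset, then release.
			fmt.Println("delivered message", id)
			<-sem
		}(msg)
	}
	wg.Wait()
}

With this shape, a stalled output only ever delays the reader by the size of the in-flight window, which is the behavior the updated tests below exercise.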

plugins/inputs/kafka_consumer/kafka_consumer_test.go

@@ -1,13 +1,14 @@
 package kafka_consumer
 
 import (
+	"context"
 	"strings"
 	"testing"
 
+	"github.com/Shopify/sarama"
 	"github.com/influxdata/telegraf"
 	"github.com/influxdata/telegraf/plugins/parsers"
 	"github.com/influxdata/telegraf/testutil"
 
-	"github.com/Shopify/sarama"
 	"github.com/stretchr/testify/assert"
 )
@@ -18,31 +19,57 @@ const (
 	invalidMsg = "cpu_load_short,host=server01 1422568543702900257\n"
 )
 
-func newTestKafka() (*Kafka, chan *sarama.ConsumerMessage) {
-	in := make(chan *sarama.ConsumerMessage, 1000)
-	k := Kafka{
-		ConsumerGroup:   "test",
-		Topics:          []string{"telegraf"},
-		Brokers:         []string{"localhost:9092"},
-		Offset:          "oldest",
-		in:              in,
-		doNotCommitMsgs: true,
-		errs:            make(chan error, 1000),
-		done:            make(chan struct{}),
+type TestConsumer struct {
+	errors   chan error
+	messages chan *sarama.ConsumerMessage
+}
+
+func (c *TestConsumer) Errors() <-chan error {
+	return c.errors
+}
+
+func (c *TestConsumer) Messages() <-chan *sarama.ConsumerMessage {
+	return c.messages
+}
+
+func (c *TestConsumer) MarkOffset(msg *sarama.ConsumerMessage, metadata string) {
+}
+
+func (c *TestConsumer) Close() error {
+	return nil
+}
+
+func (c *TestConsumer) Inject(msg *sarama.ConsumerMessage) {
+	c.messages <- msg
+}
+
+func newTestKafka() (*Kafka, *TestConsumer) {
+	consumer := &TestConsumer{
+		errors:   make(chan error),
+		messages: make(chan *sarama.ConsumerMessage, 1000),
 	}
-	return &k, in
+	k := Kafka{
+		cluster:                consumer,
+		ConsumerGroup:          "test",
+		Topics:                 []string{"telegraf"},
+		Brokers:                []string{"localhost:9092"},
+		Offset:                 "oldest",
+		MaxUndeliveredMessages: defaultMaxUndeliveredMessages,
+		doNotCommitMsgs:        true,
+		messages:               make(map[telegraf.TrackingID]*sarama.ConsumerMessage),
+	}
+	return &k, consumer
 }
 
 // Test that the parser parses kafka messages into points
 func TestRunParser(t *testing.T) {
-	k, in := newTestKafka()
+	k, consumer := newTestKafka()
 	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
+	ctx := context.Background()
 
 	k.parser, _ = parsers.NewInfluxParser()
-	go k.receiver()
-	in <- saramaMsg(testMsg)
+	go k.receiver(ctx, &acc)
+	consumer.Inject(saramaMsg(testMsg))
 	acc.Wait(1)
 
 	assert.Equal(t, acc.NFields(), 1)
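Note how every test now starts the consumer loop as go k.receiver(ctx, &acc) and feeds it through consumer.Inject, rather than closing a done channel. A hypothetical, self-contained sketch of that context-driven loop shape (all names here are illustrative, not the plugin's exact code):

package main

import (
	"context"
	"fmt"
)

// consumer mirrors the small surface the TestConsumer double provides.
type consumer interface {
	Messages() <-chan string
	Errors() <-chan error
}

// receiver drains errors and messages until the context is cancelled,
// replacing the old done-channel signalling.
func receiver(ctx context.Context, c consumer, addFields func(string), addError func(error)) {
	for {
		select {
		case <-ctx.Done():
			return
		case err := <-c.Errors():
			addError(err)
		case msg := <-c.Messages():
			addFields(msg) // the real plugin parses and tracks the metric here
		}
	}
}

type fakeConsumer struct {
	messages chan string
	errors   chan error
}

func (f *fakeConsumer) Messages() <-chan string { return f.messages }
func (f *fakeConsumer) Errors() <-chan error    { return f.errors }

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	f := &fakeConsumer{messages: make(chan string, 1), errors: make(chan error, 1)}
	done := make(chan struct{})
	go func() {
		defer close(done)
		receiver(ctx, f, func(v string) {
			fmt.Println("parsed:", v)
			cancel() // stop after the first message, like acc.Wait(1) then teardown
		}, func(err error) { fmt.Println("error:", err) })
	}()
	f.messages <- "cpu_load_short,host=server01 value=23422.0"
	<-done
}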
@@ -50,14 +77,13 @@ func TestRunParser(t *testing.T) {
 
 // Test that the parser ignores invalid messages
 func TestRunParserInvalidMsg(t *testing.T) {
-	k, in := newTestKafka()
+	k, consumer := newTestKafka()
 	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
+	ctx := context.Background()
 
 	k.parser, _ = parsers.NewInfluxParser()
-	go k.receiver()
-	in <- saramaMsg(invalidMsg)
+	go k.receiver(ctx, &acc)
+	consumer.Inject(saramaMsg(invalidMsg))
 	acc.WaitError(1)
 
 	assert.Equal(t, acc.NFields(), 0)
@@ -66,15 +92,14 @@ func TestRunParserInvalidMsg(t *testing.T) {
 // Test that overlong messages are dropped
 func TestDropOverlongMsg(t *testing.T) {
 	const maxMessageLen = 64 * 1024
-	k, in := newTestKafka()
+	k, consumer := newTestKafka()
 	k.MaxMessageLen = maxMessageLen
 	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
+	ctx := context.Background()
 	overlongMsg := strings.Repeat("v", maxMessageLen+1)
 
-	go k.receiver()
-	in <- saramaMsg(overlongMsg)
+	go k.receiver(ctx, &acc)
+	consumer.Inject(saramaMsg(overlongMsg))
 	acc.WaitError(1)
 
 	assert.Equal(t, acc.NFields(), 0)
@@ -82,14 +107,13 @@ func TestDropOverlongMsg(t *testing.T) {
 
 // Test that the parser parses kafka messages into points
 func TestRunParserAndGather(t *testing.T) {
-	k, in := newTestKafka()
+	k, consumer := newTestKafka()
 	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
+	ctx := context.Background()
 
 	k.parser, _ = parsers.NewInfluxParser()
-	go k.receiver()
-	in <- saramaMsg(testMsg)
+	go k.receiver(ctx, &acc)
+	consumer.Inject(saramaMsg(testMsg))
 	acc.Wait(1)
 
 	acc.GatherError(k.Gather)
@@ -101,14 +125,13 @@ func TestRunParserAndGather(t *testing.T) {
 
 // Test that the parser parses kafka messages into points
 func TestRunParserAndGatherGraphite(t *testing.T) {
-	k, in := newTestKafka()
+	k, consumer := newTestKafka()
 	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
+	ctx := context.Background()
 
 	k.parser, _ = parsers.NewGraphiteParser("_", []string{}, nil)
-	go k.receiver()
-	in <- saramaMsg(testMsgGraphite)
+	go k.receiver(ctx, &acc)
+	consumer.Inject(saramaMsg(testMsgGraphite))
 	acc.Wait(1)
 
 	acc.GatherError(k.Gather)
@@ -120,17 +143,16 @@ func TestRunParserAndGatherGraphite(t *testing.T) {
 
 // Test that the parser parses kafka messages into points
 func TestRunParserAndGatherJSON(t *testing.T) {
-	k, in := newTestKafka()
+	k, consumer := newTestKafka()
 	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
+	ctx := context.Background()
 
 	k.parser, _ = parsers.NewParser(&parsers.Config{
 		DataFormat: "json",
 		MetricName: "kafka_json_test",
 	})
-	go k.receiver()
-	in <- saramaMsg(testMsgJSON)
+	go k.receiver(ctx, &acc)
+	consumer.Inject(saramaMsg(testMsgJSON))
 	acc.Wait(1)
 
 	acc.GatherError(k.Gather)