Fix unit tests for new metric implementation

Cameron Sparr 2016-11-28 18:19:35 +00:00
parent db7a4b24b6
commit e5c7a71d8e
29 changed files with 226 additions and 180 deletions

View File

@@ -42,7 +42,7 @@ func TestAdd(t *testing.T) {
 testm = <-metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", now.UnixNano()),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", now.UnixNano()),
 actual)
 }
@@ -70,7 +70,7 @@ func TestAddFields(t *testing.T) {
 testm = <-metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test usage=99 %d", now.UnixNano()),
+fmt.Sprintf("acctest,acc=test usage=99 %d\n", now.UnixNano()),
 actual)
 }
@@ -126,7 +126,7 @@ func TestAddNoIntervalWithPrecision(t *testing.T) {
 testm = <-a.metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", int64(1139572800000000000)),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", int64(1139572800000000000)),
 actual)
 }
@@ -158,7 +158,7 @@ func TestAddDisablePrecision(t *testing.T) {
 testm = <-a.metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", int64(1139572800082912748)),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", int64(1139572800082912748)),
 actual)
 }
@@ -190,7 +190,7 @@ func TestAddNoPrecisionWithInterval(t *testing.T) {
 testm = <-a.metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", int64(1139572800000000000)),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", int64(1139572800000000000)),
 actual)
 }
@@ -207,7 +207,7 @@ func TestDifferentPrecisions(t *testing.T) {
 testm := <-a.metrics
 actual := testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", int64(1139572800000000000)),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", int64(1139572800000000000)),
 actual)
 a.SetPrecision(0, time.Millisecond)
@@ -217,7 +217,7 @@ func TestDifferentPrecisions(t *testing.T) {
 testm = <-a.metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", int64(1139572800083000000)),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", int64(1139572800083000000)),
 actual)
 a.SetPrecision(0, time.Microsecond)
@@ -227,7 +227,7 @@ func TestDifferentPrecisions(t *testing.T) {
 testm = <-a.metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", int64(1139572800082913000)),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", int64(1139572800082913000)),
 actual)
 a.SetPrecision(0, time.Nanosecond)
@@ -237,7 +237,7 @@ func TestDifferentPrecisions(t *testing.T) {
 testm = <-a.metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", int64(1139572800082912748)),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", int64(1139572800082912748)),
 actual)
 }
@@ -270,7 +270,7 @@ func TestAddGauge(t *testing.T) {
 testm = <-metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", now.UnixNano()),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", now.UnixNano()),
 actual)
 assert.Equal(t, testm.Type(), telegraf.Gauge)
 }
@@ -304,7 +304,7 @@ func TestAddCounter(t *testing.T) {
 testm = <-metrics
 actual = testm.String()
 assert.Equal(t,
-fmt.Sprintf("acctest,acc=test value=101 %d", now.UnixNano()),
+fmt.Sprintf("acctest,acc=test value=101 %d\n", now.UnixNano()),
 actual)
 assert.Equal(t, testm.Type(), telegraf.Counter)
 }
@@ -328,11 +328,11 @@ func (tm *TestMetricMaker) MakeMetric(
 return m
 }
 case telegraf.Counter:
-if m, err := telegraf.NewCounterMetric(measurement, tags, fields, t); err == nil {
+if m, err := metric.New(measurement, tags, fields, t, telegraf.Counter); err == nil {
 return m
 }
 case telegraf.Gauge:
-if m, err := telegraf.NewGaugeMetric(measurement, tags, fields, t); err == nil {
+if m, err := metric.New(measurement, tags, fields, t, telegraf.Gauge); err == nil {
 return m
 }
 }
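The two behaviors these test updates track are visible above: the dedicated telegraf.NewCounterMetric/NewGaugeMetric constructors are replaced by metric.New with the value type as a trailing argument, and String() now ends each point with a newline (hence the added "\n" in the fmt.Sprintf expectations). A minimal standalone sketch of that usage, assuming only the constructor signature and import paths visible in this diff:

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/metric"
)

func main() {
	now := time.Now()
	// The value type is now a trailing argument to metric.New instead of
	// a dedicated NewCounterMetric/NewGaugeMetric constructor.
	m, err := metric.New(
		"acctest",
		map[string]string{"acc": "test"},
		map[string]interface{}{"value": float64(101)},
		now,
		telegraf.Counter,
	)
	if err != nil {
		panic(err)
	}
	// String() now returns a newline-terminated line-protocol point,
	// which is why the expected strings above gained a trailing "\n".
	fmt.Printf("%q\n", m.String())
}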

View File

@@ -184,8 +184,8 @@ func TestMakeMetricA(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest value=101i %d", now.UnixNano()),
 )
 assert.Equal(
 t,
@@ -202,8 +202,8 @@ func TestMakeMetricA(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest value=101i %d", now.UnixNano()),
 )
 assert.Equal(
 t,
@@ -220,8 +220,8 @@ func TestMakeMetricA(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest value=101i %d", now.UnixNano()),
 )
 assert.Equal(
 t,

View File

@@ -50,7 +50,7 @@ func TestMakeMetricNilFields(t *testing.T) {
 )
 assert.Equal(
 t,
-fmt.Sprintf("RITest value=101i %d", now.UnixNano()),
+fmt.Sprintf("RITest value=101i %d\n", now.UnixNano()),
 m.String(),
 )
 }
@@ -78,8 +78,8 @@ func TestMakeMetric(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest value=101i %d", now.UnixNano()),
 )
 assert.Equal(
 t,
@@ -96,8 +96,8 @@ func TestMakeMetric(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest value=101i %d", now.UnixNano()),
 )
 assert.Equal(
 t,
@@ -114,8 +114,8 @@ func TestMakeMetric(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest value=101i %d", now.UnixNano()),
 )
 assert.Equal(
 t,
@@ -148,8 +148,8 @@ func TestMakeMetricWithPluginTags(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest,foo=bar value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest,foo=bar value=101i %d", now.UnixNano()),
 )
 }
@@ -204,8 +204,8 @@ func TestMakeMetricWithDaemonTags(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest,foo=bar value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest,foo=bar value=101i %d", now.UnixNano()),
 )
 }
@@ -237,8 +237,8 @@ func TestMakeMetricInfFields(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest value=101i %d", now.UnixNano()),
 )
 }
@@ -275,11 +275,20 @@ func TestMakeMetricAllFieldTypes(t *testing.T) {
 telegraf.Untyped,
 now,
 )
-assert.Equal(
-t,
-fmt.Sprintf("RITest a=10i,b=10i,c=10i,d=10i,e=10i,f=10i,g=10i,h=10i,i=10i,j=10,k=9223372036854775807i,l=\"foobar\",m=true %d", now.UnixNano()),
-m.String(),
-)
+assert.Contains(t, m.String(), "a=10i")
+assert.Contains(t, m.String(), "b=10i")
+assert.Contains(t, m.String(), "c=10i")
+assert.Contains(t, m.String(), "d=10i")
+assert.Contains(t, m.String(), "e=10i")
+assert.Contains(t, m.String(), "f=10i")
+assert.Contains(t, m.String(), "g=10i")
+assert.Contains(t, m.String(), "h=10i")
+assert.Contains(t, m.String(), "i=10i")
+assert.Contains(t, m.String(), "j=10")
+assert.NotContains(t, m.String(), "j=10i")
+assert.Contains(t, m.String(), "k=9223372036854775807i")
+assert.Contains(t, m.String(), "l=\"foobar\"")
+assert.Contains(t, m.String(), "m=true")
 }
 func TestMakeMetricNameOverride(t *testing.T) {
@@ -300,8 +309,8 @@ func TestMakeMetricNameOverride(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("foobar value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("foobar value=101i %d", now.UnixNano()),
 )
 }
@@ -323,8 +332,8 @@ func TestMakeMetricNamePrefix(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("foobar_RITest value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("foobar_RITest value=101i %d", now.UnixNano()),
 )
 }
@@ -346,7 +355,7 @@ func TestMakeMetricNameSuffix(t *testing.T) {
 )
 assert.Equal(
 t,
+fmt.Sprintf("RITest_foobar value=101i %d\n", now.UnixNano()),
 m.String(),
-fmt.Sprintf("RITest_foobar value=101i %d", now.UnixNano()),
 )
 }
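The switch in TestMakeMetricAllFieldTypes from one assert.Equal on the full serialized line to per-field assert.Contains checks sidesteps field ordering: the new metric writes fields by ranging over a Go map (see the New loop in metric.go below), and map iteration order is not deterministic, so the exact concatenation can vary between runs. A small self-contained illustration of the substring-based check (the sample line is made up for the example):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Field order in the serialized point follows map iteration order,
	// so the test checks for each key=value substring rather than
	// comparing the whole line.
	line := "RITest a=10i,m=true,j=10 1257894000000000000\n"
	for _, want := range []string{"a=10i", "j=10", "m=true"} {
		fmt.Printf("%-8s %v\n", want, strings.Contains(line, want))
	}
	// "j=10i" must not appear: j is serialized without the integer
	// suffix, which is what the NotContains assertion above guards.
	fmt.Println("j=10i  ", strings.Contains(line, "j=10i"))
}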

View File

@@ -39,7 +39,7 @@ func BenchmarkRunningOutputAddWrite(b *testing.B) {
 ro.Quiet = true
 for n := 0; n < b.N; n++ {
-ro.AddMetric(first5[0])
+ro.AddMetric(testutil.TestMetric(101, "metric1"))
 ro.Write()
 }
 }
@@ -55,7 +55,7 @@ func BenchmarkRunningOutputAddWriteEvery100(b *testing.B) {
 ro.Quiet = true
 for n := 0; n < b.N; n++ {
-ro.AddMetric(first5[0])
+ro.AddMetric(testutil.TestMetric(101, "metric1"))
 if n%100 == 0 {
 ro.Write()
 }
@@ -74,7 +74,7 @@ func BenchmarkRunningOutputAddFailWrites(b *testing.B) {
 ro.Quiet = true
 for n := 0; n < b.N; n++ {
-ro.AddMetric(first5[0])
+ro.AddMetric(testutil.TestMetric(101, "metric1"))
 }
 }
@@ -140,7 +140,7 @@ func TestRunningOutput_TagIncludeNoMatch(t *testing.T) {
 m := &mockOutput{}
 ro := NewRunningOutput("test", m, conf, 1000, 10000)
-ro.AddMetric(first5[0])
+ro.AddMetric(testutil.TestMetric(101, "metric1"))
 assert.Len(t, m.Metrics(), 0)
 err := ro.Write()
@@ -161,7 +161,7 @@ func TestRunningOutput_TagExcludeMatch(t *testing.T) {
 m := &mockOutput{}
 ro := NewRunningOutput("test", m, conf, 1000, 10000)
-ro.AddMetric(first5[0])
+ro.AddMetric(testutil.TestMetric(101, "metric1"))
 assert.Len(t, m.Metrics(), 0)
 err := ro.Write()
@@ -182,7 +182,7 @@ func TestRunningOutput_TagExcludeNoMatch(t *testing.T) {
 m := &mockOutput{}
 ro := NewRunningOutput("test", m, conf, 1000, 10000)
-ro.AddMetric(first5[0])
+ro.AddMetric(testutil.TestMetric(101, "metric1"))
 assert.Len(t, m.Metrics(), 0)
 err := ro.Write()
@@ -203,7 +203,7 @@ func TestRunningOutput_TagIncludeMatch(t *testing.T) {
 m := &mockOutput{}
 ro := NewRunningOutput("test", m, conf, 1000, 10000)
-ro.AddMetric(first5[0])
+ro.AddMetric(testutil.TestMetric(101, "metric1"))
 assert.Len(t, m.Metrics(), 0)
 err := ro.Write()

View File

@@ -15,6 +15,8 @@ import (
 "github.com/influxdata/influxdb/client/v2"
 )
+const MaxInt = int(^uint(0) >> 1)
 var (
 // escaper is for escaping:
 // - tag keys
@@ -63,7 +65,6 @@ func New(
 m.tags = append(m.tags, []byte("="+escaper.Replace(v))...)
 }
-m.fields = []byte{' '}
 i := 0
 for k, v := range fields {
 if i != 0 {
@@ -72,7 +73,6 @@ func New(
 m.fields = appendField(m.fields, k, v)
 i++
 }
-m.fields = append(m.fields, ' ')
 return m, nil
 }
@@ -103,6 +103,9 @@ func indexUnescapedByte(buf []byte, b byte) int {
 func countBackslashes(buf []byte, index int) int {
 var count int
 for {
+if index < 0 {
+return count
+}
 if buf[index] == '\\' {
 count++
 index--
@@ -130,7 +133,8 @@ type metric struct {
 }
 func (m *metric) Point() *client.Point {
-return &client.Point{}
+c, _ := client.NewPoint(m.Name(), m.Tags(), m.Fields(), m.Time())
+return c
 }
 func (m *metric) String() string {
@@ -150,16 +154,25 @@ func (m *metric) Type() telegraf.ValueType {
 }
 func (m *metric) Len() int {
-return len(m.name) + len(m.tags) + len(m.fields) + len(m.t) + 1
+return len(m.name) + len(m.tags) + 1 + len(m.fields) + 1 + len(m.t) + 1
 }
 func (m *metric) Serialize() []byte {
 tmp := make([]byte, m.Len())
-copy(tmp, m.name)
-copy(tmp[len(m.name):], m.tags)
-copy(tmp[len(m.name)+len(m.tags):], m.fields)
-copy(tmp[len(m.name)+len(m.tags)+len(m.fields):], m.t)
-tmp[len(tmp)-1] = '\n'
+i := 0
+copy(tmp[i:], m.name)
+i += len(m.name)
+copy(tmp[i:], m.tags)
+i += len(m.tags)
+tmp[i] = ' '
+i++
+copy(tmp[i:], m.fields)
+i += len(m.fields)
+tmp[i] = ' '
+i++
+copy(tmp[i:], m.t)
+i += len(m.t)
+tmp[i] = '\n'
 return tmp
 }
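With the field bytes no longer carrying their own padding spaces (see the two deletions in New above), Len() counts the two separators and the trailing newline explicitly and Serialize() writes them by hand. A standalone sketch of the same buffer layout on plain byte slices, independent of the telegraf metric type (slice contents are illustrative):

package main

import "fmt"

func serialize(name, tags, fields, t []byte) []byte {
	// total length: name + tags + ' ' + fields + ' ' + timestamp + '\n'
	tmp := make([]byte, len(name)+len(tags)+1+len(fields)+1+len(t)+1)
	i := 0
	i += copy(tmp[i:], name)
	i += copy(tmp[i:], tags)
	tmp[i] = ' '
	i++
	i += copy(tmp[i:], fields)
	tmp[i] = ' '
	i++
	i += copy(tmp[i:], t)
	tmp[i] = '\n'
	return tmp
}

func main() {
	out := serialize(
		[]byte("cpu"),
		[]byte(",host=foo"),
		[]byte("usage_idle=99"),
		[]byte("1422568543702900257"),
	)
	fmt.Printf("%q\n", out) // "cpu,host=foo usage_idle=99 1422568543702900257\n"
}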
@@ -170,7 +183,7 @@ func (m *metric) Fields() map[string]interface{} {
 }
 m.fieldMap = map[string]interface{}{}
-i := 1
+i := 0
 for {
 if i >= len(m.fields) {
 break
 }
@@ -182,10 +195,20 @@ func (m *metric) Fields() map[string]interface{} {
 }
 // start index of field value
 i2 := i1 + 1
 // end index of field value
-i3 := indexUnescapedByte(m.fields[i:], ',')
-if i3 == -1 {
-i3 = len(m.fields[i:]) - 1
+var i3 int
+if m.fields[i:][i2] == '"' {
+i3 = indexUnescapedByte(m.fields[i:][i2+1:], '"')
+if i3 == -1 {
+i3 = len(m.fields[i:])
+}
+i3 += i2 + 2 // increment index to the comma
+} else {
+i3 = indexUnescapedByte(m.fields[i:], ',')
+if i3 == -1 {
+i3 = len(m.fields[i:])
+}
 }
 switch m.fields[i:][i2] {
@@ -213,9 +236,9 @@ func (m *metric) Fields() map[string]interface{} {
 }
 }
 case 'T', 't':
-// TODO handle "true" booleans
+m.fieldMap[string(m.fields[i:][0:i1])] = true
 case 'F', 'f':
-// TODO handle "false" booleans
+m.fieldMap[string(m.fields[i:][0:i1])] = false
 default:
 // TODO handle unsupported field type
 }
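Two gaps get closed here: field values that start with a double quote are now scanned to the closing quote first, so commas inside string values no longer cut the value short, and values starting with 'T'/'t'/'F'/'f' are stored as real booleans instead of being left as TODOs. A simplified, escape-unaware illustration (splitFields is not the telegraf parser, just a sketch of why the quote check matters when splitting a field set on commas):

package main

import (
	"fmt"
	"strings"
)

// splitFields splits a line-protocol field set on commas, but ignores
// commas that fall inside a quoted string value.
func splitFields(s string) []string {
	var out []string
	start := 0
	inQuotes := false
	for i := 0; i < len(s); i++ {
		switch s[i] {
		case '"':
			inQuotes = !inQuotes
		case ',':
			if !inQuotes {
				out = append(out, s[start:i])
				start = i + 1
			}
		}
	}
	return append(out, s[start:])
}

func main() {
	fmt.Println(splitFields(`value=101i,msg="a,b",up=true`))
	// [value=101i msg="a,b" up=true]
	_ = strings.TrimSpace // keep the import used in larger experiments
}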
@@ -309,6 +332,7 @@ func (m *metric) HasTag(key string) bool {
 func (m *metric) RemoveTag(key string) bool {
 m.tagMap = nil
 m.hashID = 0
 i := bytes.Index(m.tags, []byte(escaper.Replace(key)+"="))
 if i == -1 {
 return false
@@ -355,21 +379,17 @@ func (m *metric) RemoveField(key string) bool {
 }
 func (m *metric) Copy() telegraf.Metric {
-name := make([]byte, len(m.name))
-tags := make([]byte, len(m.tags))
-fields := make([]byte, len(m.fields))
-t := make([]byte, len(m.t))
-copy(name, m.name)
-copy(tags, m.tags)
-copy(fields, m.fields)
-copy(t, m.t)
-return &metric{
-name: name,
-tags: tags,
-fields: fields,
-t: t,
-hashID: m.hashID,
+mOut := metric{
+name: make([]byte, len(m.name)),
+tags: make([]byte, len(m.tags)),
+fields: make([]byte, len(m.fields)),
+t: make([]byte, len(m.t)),
 }
+copy(mOut.name, m.name)
+copy(mOut.tags, m.tags)
+copy(mOut.fields, m.fields)
+copy(mOut.t, m.t)
+return &mOut
 }
 func (m *metric) HashID() uint64 {
@@ -423,6 +443,16 @@ func appendField(b []byte, k string, v interface{}) []byte {
 case int:
 b = strconv.AppendInt(b, int64(v), 10)
 b = append(b, 'i')
+case uint64:
+// Cap uints above the maximum int value
+var intv int64
+if v <= uint64(MaxInt) {
+intv = int64(v)
+} else {
+intv = int64(MaxInt)
+}
+b = strconv.AppendInt(b, intv, 10)
+b = append(b, 'i')
 case uint32:
 b = strconv.AppendInt(b, int64(v), 10)
 b = append(b, 'i')
@@ -432,11 +462,15 @@ func appendField(b []byte, k string, v interface{}) []byte {
 case uint8:
 b = strconv.AppendInt(b, int64(v), 10)
 b = append(b, 'i')
-// TODO: 'uint' should be considered just as "dangerous" as a uint64,
-// perhaps the value should be checked and capped at MaxInt64? We could
-// then include uint64 as an accepted value
 case uint:
-b = strconv.AppendInt(b, int64(v), 10)
+// Cap uints above the maximum int value
+var intv int64
+if v <= uint(MaxInt) {
+intv = int64(v)
+} else {
+intv = int64(MaxInt)
+}
+b = strconv.AppendInt(b, intv, 10)
 b = append(b, 'i')
 case float32:
 b = strconv.AppendFloat(b, float64(v), 'f', -1, 32)
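appendField now accepts uint and uint64 fields but clamps anything above MaxInt (added at the top of this file as int(^uint(0) >> 1)) before emitting a signed integer, since line-protocol integers are signed 64-bit values. A small sketch of just that clamping path; appendUint is a hypothetical helper that mirrors the new case arms:

package main

import (
	"fmt"
	"strconv"
)

const MaxInt = int(^uint(0) >> 1)

// appendUint mirrors the capping logic above: values that do not fit in
// a signed int are clamped to MaxInt instead of overflowing.
func appendUint(b []byte, v uint64) []byte {
	var intv int64
	if v <= uint64(MaxInt) {
		intv = int64(v)
	} else {
		intv = int64(MaxInt)
	}
	b = strconv.AppendInt(b, intv, 10)
	return append(b, 'i')
}

func main() {
	fmt.Println(string(appendUint(nil, 42)))                   // 42i
	fmt.Println(string(appendUint(nil, 18446744073709551615))) // clamped to 9223372036854775807i on 64-bit
}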

View File

@@ -4,6 +4,8 @@ import (
 "fmt"
 "testing"
 "time"
+"github.com/influxdata/telegraf"
 )
 // vars for making sure that the compiler doesnt optimize out the benchmarks:
@@ -15,9 +17,9 @@ var (
 )
 func BenchmarkNewMetric(b *testing.B) {
-var mt Metric
+var mt telegraf.Metric
 for n := 0; n < b.N; n++ {
-mt, _ = NewMetric("test_metric",
+mt, _ = New("test_metric",
 map[string]string{
 "test_tag_1": "tag_value_1",
 "test_tag_2": "tag_value_2",
@@ -35,9 +37,9 @@ func BenchmarkNewMetric(b *testing.B) {
 }
 func BenchmarkNewMetricAndInspect(b *testing.B) {
-var mt Metric
+var mt telegraf.Metric
 for n := 0; n < b.N; n++ {
-mt, _ = NewMetric("test_metric",
+mt, _ = New("test_metric",
 map[string]string{
 "test_tag_1": "tag_value_1",
 "test_tag_2": "tag_value_2",
@@ -59,7 +61,7 @@ func BenchmarkNewMetricAndInspect(b *testing.B) {
 }
 func BenchmarkTags(b *testing.B) {
-var mt, _ = NewMetric("test_metric",
+var mt, _ = New("test_metric",
 map[string]string{
 "test_tag_1": "tag_value_1",
 "test_tag_2": "tag_value_2",
@@ -79,7 +81,7 @@ func BenchmarkTags(b *testing.B) {
 }
 func BenchmarkFields(b *testing.B) {
-var mt, _ = NewMetric("test_metric",
+var mt, _ = New("test_metric",
 map[string]string{
 "test_tag_1": "tag_value_1",
 "test_tag_2": "tag_value_2",
@@ -99,7 +101,7 @@ func BenchmarkFields(b *testing.B) {
 }
 func BenchmarkSerializeMetric(b *testing.B) {
-mt, _ := NewMetric("test_metric",
+mt, _ := New("test_metric",
 map[string]string{
 "test_tag_1": "tag_value_1",
 "test_tag_2": "tag_value_2",
@@ -120,7 +122,7 @@ func BenchmarkSerializeMetric(b *testing.B) {
 }
 func BenchmarkSerializeMetricBytes(b *testing.B) {
-mt, _ := NewMetric("test_metric",
+mt, _ := New("test_metric",
 map[string]string{
 "test_tag_1": "tag_value_1",
 "test_tag_2": "tag_value_2",

View File

@@ -6,6 +6,8 @@ import (
 "testing"
 "time"
+"github.com/influxdata/telegraf"
 "github.com/stretchr/testify/assert"
 )
@@ -23,7 +25,7 @@ func TestNewMetric(t *testing.T) {
 m, err := New("cpu", tags, fields, now)
 assert.NoError(t, err)
-assert.Equal(t, Untyped, m.Type())
+assert.Equal(t, telegraf.Untyped, m.Type())
 assert.Equal(t, tags, m.Tags())
 assert.Equal(t, fields, m.Fields())
 assert.Equal(t, "cpu", m.Name())
@@ -42,10 +44,10 @@ func TestNewGaugeMetric(t *testing.T) {
 "usage_idle": float64(99),
 "usage_busy": float64(1),
 }
-m, err := New("cpu", tags, fields, now, Gauge)
+m, err := New("cpu", tags, fields, now, telegraf.Gauge)
 assert.NoError(t, err)
-assert.Equal(t, Gauge, m.Type())
+assert.Equal(t, telegraf.Gauge, m.Type())
 assert.Equal(t, tags, m.Tags())
 assert.Equal(t, fields, m.Fields())
 assert.Equal(t, "cpu", m.Name())
@@ -64,10 +66,10 @@ func TestNewCounterMetric(t *testing.T) {
 "usage_idle": float64(99),
 "usage_busy": float64(1),
 }
-m, err := New("cpu", tags, fields, now, Counter)
+m, err := New("cpu", tags, fields, now, telegraf.Counter)
 assert.NoError(t, err)
-assert.Equal(t, Counter, m.Type())
+assert.Equal(t, telegraf.Counter, m.Type())
 assert.Equal(t, tags, m.Tags())
 assert.Equal(t, fields, m.Fields())
 assert.Equal(t, "cpu", m.Name())

View File

@@ -45,28 +45,25 @@ func Parse(buf []byte) ([]telegraf.Metric, error) {
 func ParseWithDefaultTime(buf []byte, t time.Time) ([]telegraf.Metric, error) {
 metrics := make([]telegraf.Metric, 0, bytes.Count(buf, []byte("\n"))+1)
-var (
-errStr string
-line []byte
-err error
-)
-b := bytes.NewBuffer(buf)
+var errStr string
+i := 0
 for {
-line, err = b.ReadBytes('\n')
-if err != nil {
+j := bytes.IndexByte(buf[i:], '\n')
+if j == -1 {
 break
 }
-if len(line) < 2 {
+if len(buf[i:i+j]) < 2 {
+i += j + 1 // increment i past the previous newline
 continue
 }
-// trim the newline:
-line = line[0 : len(line)-1]
-m, err := parseMetric(line, t)
+m, err := parseMetric(buf[i:i+j], t)
 if err != nil {
+i += j + 1 // increment i past the previous newline
 errStr += " " + err.Error()
 continue
 }
+i += j + 1 // increment i past the previous newline
 metrics = append(metrics, m)
 }
@@ -135,7 +132,10 @@ func parseMetric(buf []byte, defaultTime time.Time) (telegraf.Metric, error) {
 m.t = []byte(dTime)
 }
-return m, nil
+// here we copy on return because this allows us to later call
+// AddTag, AddField, RemoveTag, RemoveField, etc. without worrying about
+// modifying 'tag' bytes having an affect on 'field' bytes, for example.
+return m.Copy(), nil
 }
 // scanKey scans buf starting at i for the measurement and tag portion of the point.
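The rewritten loop walks the buffer by newline index instead of pulling copies out of a bytes.Buffer, and parseMetric now returns m.Copy() so the returned metric no longer aliases the caller's buffer (the committed comment spells out why). A minimal sketch of the same index-walking pattern on sample input:

package main

import (
	"bytes"
	"fmt"
)

func main() {
	buf := []byte("cpu value=1 1422568543702900257\ncpu value=2 1422568543702900258\n")
	i := 0
	for {
		// Find the end of the current line without copying, the same
		// indexing pattern the parser loop above switches to.
		j := bytes.IndexByte(buf[i:], '\n')
		if j == -1 {
			break
		}
		line := buf[i : i+j]
		if len(line) >= 2 {
			fmt.Printf("%q\n", line)
		}
		i += j + 1 // move past the newline
	}
}

Note that a final line without a trailing '\n' is never reached (IndexByte returns -1 and the loop breaks), which is consistent with the listener and test inputs elsewhere in this commit now ending in a newline.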

View File

@@ -4,7 +4,6 @@ import (
 "testing"
 "time"
-"github.com/influxdata/telegraf"
 "github.com/influxdata/telegraf/metric"
 "github.com/influxdata/telegraf/testutil"
 )

View File

@@ -36,7 +36,7 @@ const malformedJson = `
 "status": "green",
 `
-const lineProtocol = "cpu,host=foo,datacenter=us-east usage_idle=99,usage_busy=1"
+const lineProtocol = "cpu,host=foo,datacenter=us-east usage_idle=99,usage_busy=1\n"
 const lineProtocolMulti = `
 cpu,cpu=cpu0,host=foo,datacenter=us-east usage_idle=99,usage_busy=1

View File

@@ -248,7 +248,7 @@ func (h *HTTPListener) serveWrite(res http.ResponseWriter, req *http.Request) {
 bufStart = 0
 continue
 }
-if err := h.parse(buf[:i], now); err != nil {
+if err := h.parse(buf[:i+1], now); err != nil {
 log.Println("E! " + err.Error())
 return400 = true
 }

View File

@@ -23,7 +23,7 @@ func TestReadsMetricsFromKafka(t *testing.T) {
 testTopic := fmt.Sprintf("telegraf_test_topic_%d", time.Now().Unix())
 // Send a Kafka message to the kafka host
-msg := "cpu_load_short,direction=in,host=server01,region=us-west value=23422.0 1422568543702900257"
+msg := "cpu_load_short,direction=in,host=server01,region=us-west value=23422.0 1422568543702900257\n"
 producer, err := sarama.NewSyncProducer(brokerPeers, nil)
 require.NoError(t, err)
 _, _, err = producer.SendMessage(

View File

@@ -12,10 +12,10 @@ import (
 )
 const (
-testMsg = "cpu_load_short,host=server01 value=23422.0 1422568543702900257"
+testMsg = "cpu_load_short,host=server01 value=23422.0 1422568543702900257\n"
 testMsgGraphite = "cpu.load.short.graphite 23422 1454780029"
 testMsgJSON = "{\"a\": 5, \"b\": {\"c\": 6}}\n"
-invalidMsg = "cpu_load_short,host=server01 1422568543702900257"
+invalidMsg = "cpu_load_short,host=server01 1422568543702900257\n"
 )
 func newTestKafka() (*Kafka, chan *sarama.ConsumerMessage) {

View File

@@ -13,10 +13,10 @@ import (
 )
 const (
-testMsg = "cpu_load_short,host=server01 value=23422.0 1422568543702900257"
+testMsg = "cpu_load_short,host=server01 value=23422.0 1422568543702900257\n"
 testMsgGraphite = "cpu.load.short.graphite 23422 1454780029"
 testMsgJSON = "{\"a\": 5, \"b\": {\"c\": 6}}\n"
-invalidMsg = "cpu_load_short,host=server01 1422568543702900257"
+invalidMsg = "cpu_load_short,host=server01 1422568543702900257\n"
 )
 func newTestMQTTConsumer() (*MQTTConsumer, chan mqtt.Message) {

View File

@@ -10,10 +10,10 @@ import (
 )
 const (
-testMsg = "cpu_load_short,host=server01 value=23422.0 1422568543702900257"
+testMsg = "cpu_load_short,host=server01 value=23422.0 1422568543702900257\n"
 testMsgGraphite = "cpu.load.short.graphite 23422 1454780029"
 testMsgJSON = "{\"a\": 5, \"b\": {\"c\": 6}}\n"
-invalidMsg = "cpu_load_short,host=server01 1422568543702900257"
+invalidMsg = "cpu_load_short,host=server01 1422568543702900257\n"
 metricBuffer = 5
 )

View File

@@ -20,7 +20,7 @@ import (
 // This test is modeled after the kafka consumer integration test
 func TestReadsMetricsFromNSQ(t *testing.T) {
 msgID := nsq.MessageID{'1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 's', 'd', 'f', 'g', 'h'}
-msg := nsq.NewMessage(msgID, []byte("cpu_load_short,direction=in,host=server01,region=us-west value=23422.0 1422568543702900257"))
+msg := nsq.NewMessage(msgID, []byte("cpu_load_short,direction=in,host=server01,region=us-west value=23422.0 1422568543702900257\n"))
 script := []instruction{
 // SUB

View File

@@ -111,9 +111,11 @@ func TestParseValidPrometheus(t *testing.T) {
 "gauge": float64(1),
 }, metrics[0].Fields())
 assert.Equal(t, map[string]string{
-"osVersion": "CentOS Linux 7 (Core)",
+"osVersion": "CentOS\\ Linux\\ 7\\ (Core)",
 "dockerVersion": "1.8.2",
 "kernelVersion": "3.10.0-229.20.1.el7.x86_64",
+"cadvisorRevision": "",
+"cadvisorVersion": "",
 }, metrics[0].Tags())
 // Counter value
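The expected osVersion tag now carries line-protocol escaping for its spaces, which suggests the new metric type hands tag values back in their escaped on-the-wire form. A tiny illustration of that escaping; the replacer contents here are an assumption about what the metric package's escaper does, not a copy of it:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Spaces (and commas) in tag values are escaped in line protocol;
	// the updated expectation keeps that escaped form.
	escaper := strings.NewReplacer(" ", `\ `, ",", `\,`)
	fmt.Println(escaper.Replace("CentOS Linux 7 (Core)"))
	// Output: CentOS\ Linux\ 7\ (Core)
}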

View File

@@ -212,8 +212,9 @@ func (t *TcpListener) handler(conn *net.TCPConn, id string) {
 if n == 0 {
 continue
 }
-bufCopy := make([]byte, n)
+bufCopy := make([]byte, n+1)
 copy(bufCopy, scanner.Bytes())
+bufCopy[n] = '\n'
 select {
 case t.in <- bufCopy:
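bufCopy grows by one byte so the listener can re-append the newline that bufio.Scanner strips from each line, since the parser (per the change above) only consumes '\n'-terminated lines. A minimal sketch of just that step, with an illustrative line value:

package main

import "fmt"

func main() {
	// scanner.Bytes() hands back a line with its newline stripped, so the
	// listener re-appends one before passing the bytes to the parser.
	line := []byte("cpu_load_short,host=server01 value=12.0")
	n := len(line)
	bufCopy := make([]byte, n+1)
	copy(bufCopy, line)
	bufCopy[n] = '\n'
	fmt.Printf("%q\n", bufCopy)
}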

View File

@@ -106,7 +106,7 @@ func TestConnectUDP(t *testing.T) {
 func TestRunParser(t *testing.T) {
 log.SetOutput(ioutil.Discard)
-var testmsg = []byte("cpu_load_short,host=server01 value=12.0 1422568543702900257")
+var testmsg = []byte("cpu_load_short,host=server01 value=12.0 1422568543702900257\n")
 listener, in := newTestUdpListener()
 acc := testutil.Accumulator{}

View File

@@ -156,7 +156,7 @@ func (i *Instrumental) Write(metrics []telegraf.Metric) error {
 }
 }
-allPoints := strings.Join(points, "\n") + "\n"
+allPoints := strings.Join(points, "")
 _, err = fmt.Fprintf(i.conn, allPoints)
 if err != nil {
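The join separator drops to the empty string because each serialized point now already ends with its own newline; keeping the old "\n" separator would have doubled the line breaks. A small sketch with made-up point strings:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Each point carries its trailing newline, so concatenation alone
	// yields one line per point.
	points := []string{
		"cpu value=1 1422568543702900257\n",
		"cpu value=2 1422568543702900258\n",
	}
	fmt.Printf("%q\n", strings.Join(points, ""))
}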

View File

@@ -17,7 +17,7 @@ func TestFormatMetric(t *testing.T) {
 p := testutil.MockMetrics()[0]
-valid_string := "test1,tag1=value1 value=1 1257894000000000000"
+valid_string := "test1,tag1=value1 value=1 1257894000000000000\n"
 func_string, err := FormatMetric(k, p)
 if func_string != valid_string {
@@ -29,7 +29,7 @@ func TestFormatMetric(t *testing.T) {
 Format: "custom",
 }
-valid_custom := "test1,map[tag1:value1],test1,tag1=value1 value=1 1257894000000000000"
+valid_custom := "test1,map[tag1:value1],test1,tag1=value1 value=1 1257894000000000000\n"
 func_custom, err := FormatMetric(k, p)
 if func_custom != valid_custom {

View File

@@ -163,14 +163,14 @@ func TestBuildGauge(t *testing.T) {
 }
 }
-func newHostMetric(value interface{}, name, host string) (metric telegraf.Metric) {
-metric, _ = metric.New(
+func newHostMetric(value interface{}, name, host string) telegraf.Metric {
+m, _ := metric.New(
 name,
 map[string]string{"host": host},
 map[string]interface{}{"value": value},
 time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
 )
-return
+return m
 }
 func TestBuildGaugeWithSource(t *testing.T) {

View File

@@ -6,7 +6,6 @@ import (
 "testing"
 "time"
-"github.com/influxdata/telegraf"
 "github.com/influxdata/telegraf/metric"
 "github.com/stretchr/testify/assert"
@@ -505,16 +504,11 @@ func TestFilterMatchMostLongestFilter(t *testing.T) {
 t.Fatalf("unexpected error creating parser, got %v", err)
 }
-exp, err := metric.New("cpu_load",
-map[string]string{"host": "localhost", "resource": "cpu"},
-map[string]interface{}{"value": float64(11)},
-time.Unix(1435077219, 0))
-assert.NoError(t, err)
 m, err := p.ParseLine("servers.localhost.cpu.cpu_load 11 1435077219")
 assert.NoError(t, err)
-assert.Equal(t, exp.String(), m.String())
+assert.Contains(t, m.String(), ",host=localhost")
+assert.Contains(t, m.String(), ",resource=cpu")
 }
 func TestFilterMatchMultipleWildcards(t *testing.T) {
@@ -551,16 +545,12 @@ func TestParseDefaultTags(t *testing.T) {
 t.Fatalf("unexpected error creating parser, got %v", err)
 }
-exp, err := metric.New("cpu_load",
-map[string]string{"host": "localhost", "region": "us-east", "zone": "1c"},
-map[string]interface{}{"value": float64(11)},
-time.Unix(1435077219, 0))
-assert.NoError(t, err)
 m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
 assert.NoError(t, err)
-assert.Equal(t, exp.String(), m.String())
+assert.Contains(t, m.String(), ",host=localhost")
+assert.Contains(t, m.String(), ",region=us-east")
+assert.Contains(t, m.String(), ",zone=1c")
 }
 func TestParseDefaultTemplateTags(t *testing.T) {
@@ -572,16 +562,12 @@ func TestParseDefaultTemplateTags(t *testing.T) {
 t.Fatalf("unexpected error creating parser, got %v", err)
 }
-exp, err := metric.New("cpu_load",
-map[string]string{"host": "localhost", "region": "us-east", "zone": "1c"},
-map[string]interface{}{"value": float64(11)},
-time.Unix(1435077219, 0))
-assert.NoError(t, err)
 m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
 assert.NoError(t, err)
-assert.Equal(t, exp.String(), m.String())
+assert.Contains(t, m.String(), ",host=localhost")
+assert.Contains(t, m.String(), ",region=us-east")
+assert.Contains(t, m.String(), ",zone=1c")
 }
 func TestParseDefaultTemplateTagsOverridGlobal(t *testing.T) {
@@ -593,16 +579,12 @@ func TestParseDefaultTemplateTagsOverridGlobal(t *testing.T) {
 t.Fatalf("unexpected error creating parser, got %v", err)
 }
-exp, err := metric.New("cpu_load",
-map[string]string{"host": "localhost", "region": "us-east", "zone": "1c"},
-map[string]interface{}{"value": float64(11)},
-time.Unix(1435077219, 0))
-assert.NoError(t, err)
 m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
 assert.NoError(t, err)
-assert.Equal(t, exp.String(), m.String())
+assert.Contains(t, m.String(), ",host=localhost")
+assert.Contains(t, m.String(), ",region=us-east")
+assert.Contains(t, m.String(), ",zone=1c")
 }
 func TestParseTemplateWhitespace(t *testing.T) {
@@ -616,16 +598,12 @@ func TestParseTemplateWhitespace(t *testing.T) {
 t.Fatalf("unexpected error creating parser, got %v", err)
 }
-exp, err := metric.New("cpu_load",
-map[string]string{"host": "localhost", "region": "us-east", "zone": "1c"},
-map[string]interface{}{"value": float64(11)},
-time.Unix(1435077219, 0))
-assert.NoError(t, err)
 m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
 assert.NoError(t, err)
-assert.Equal(t, exp.String(), m.String())
+assert.Contains(t, m.String(), ",host=localhost")
+assert.Contains(t, m.String(), ",region=us-east")
+assert.Contains(t, m.String(), ",zone=1c")
 }
 // Test basic functionality of ApplyTemplate

View File

@@ -155,11 +155,11 @@ func TestParseDefaultTags(t *testing.T) {
 "datacenter": "us-east",
 "host": "foo",
 "tag": "default",
-}, metrics[0].Tags())
+}, metric.Tags())
 assert.Equal(t, map[string]interface{}{
 "usage_idle": float64(99),
 "usage_busy": float64(1),
-}, metrics[0].Fields())
+}, metric.Fields())
 }
 }

View File

@@ -67,8 +67,7 @@ func TestParseValidOutput(t *testing.T) {
 assert.Equal(t, map[string]interface{}{
 "value": float64(0.008457),
 }, metrics[0].Fields())
-assert.Equal(t, map[string]string{}, metrics[0].Tags())
+assert.Equal(t, map[string]string{"unit": ""}, metrics[0].Tags())
 }
 func TestParseInvalidOutput(t *testing.T) {

View File

@@ -3,12 +3,12 @@ package graphite
 import (
 "fmt"
 "sort"
+"strings"
 "testing"
 "time"
 "github.com/stretchr/testify/assert"
-"github.com/influxdata/telegraf"
 "github.com/influxdata/telegraf/metric"
 )
@@ -72,7 +72,8 @@ func TestSerializeMetricNoHost(t *testing.T) {
 assert.NoError(t, err)
 s := GraphiteSerializer{}
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{
@@ -99,7 +100,8 @@ func TestSerializeMetricHost(t *testing.T) {
 assert.NoError(t, err)
 s := GraphiteSerializer{}
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{
@@ -126,7 +128,8 @@ func TestSerializeValueField(t *testing.T) {
 assert.NoError(t, err)
 s := GraphiteSerializer{}
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{
@@ -152,7 +155,8 @@ func TestSerializeValueField2(t *testing.T) {
 s := GraphiteSerializer{
 Template: "host.field.tags.measurement",
 }
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{
@@ -178,7 +182,8 @@ func TestSerializeFieldWithSpaces(t *testing.T) {
 s := GraphiteSerializer{
 Template: "host.tags.measurement.field",
 }
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{
@@ -204,7 +209,8 @@ func TestSerializeTagWithSpaces(t *testing.T) {
 s := GraphiteSerializer{
 Template: "host.tags.measurement.field",
 }
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{
@@ -230,7 +236,8 @@ func TestSerializeValueField3(t *testing.T) {
 s := GraphiteSerializer{
 Template: "field.host.tags.measurement",
 }
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{
@@ -256,7 +263,8 @@ func TestSerializeValueField5(t *testing.T) {
 s := GraphiteSerializer{
 Template: template5,
 }
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{
@@ -280,7 +288,8 @@ func TestSerializeMetricPrefix(t *testing.T) {
 assert.NoError(t, err)
 s := GraphiteSerializer{Prefix: "prefix"}
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{

View File

@@ -7,6 +7,6 @@ import (
 type InfluxSerializer struct {
 }
-func (s *InfluxSerializer) Serialize(metric telegraf.Metric) ([]byte, error) {
-return metric.Serialize(), nil
+func (s *InfluxSerializer) Serialize(m telegraf.Metric) ([]byte, error) {
+return m.Serialize(), nil
 }

View File

@@ -2,12 +2,12 @@ package influx
 import (
 "fmt"
+"strings"
 "testing"
 "time"
 "github.com/stretchr/testify/assert"
-"github.com/influxdata/telegraf"
 "github.com/influxdata/telegraf/metric"
 )
@@ -23,7 +23,8 @@ func TestSerializeMetricFloat(t *testing.T) {
 assert.NoError(t, err)
 s := InfluxSerializer{}
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{fmt.Sprintf("cpu,cpu=cpu0 usage_idle=91.5 %d", now.UnixNano())}
@@ -42,7 +43,8 @@ func TestSerializeMetricInt(t *testing.T) {
 assert.NoError(t, err)
 s := InfluxSerializer{}
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{fmt.Sprintf("cpu,cpu=cpu0 usage_idle=90i %d", now.UnixNano())}
@@ -61,7 +63,8 @@ func TestSerializeMetricString(t *testing.T) {
 assert.NoError(t, err)
 s := InfluxSerializer{}
-mS, err := s.Serialize(m)
+buf, _ := s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{fmt.Sprintf("cpu,cpu=cpu0 usage_idle=\"foobar\" %d", now.UnixNano())}
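As the old assertions against an expS string slice suggest, Serialize used to hand back individual lines; it now returns one newline-terminated byte buffer, so these tests trim and split on '\n' to recover the per-line expectations. The same pattern in isolation (buffer contents are illustrative):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// A serializer result: one buffer, one '\n' per point.
	buf := []byte("cpu,cpu=cpu0 usage_idle=91.5 1480000000000000000\n")
	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
	fmt.Printf("%q\n", mS) // ["cpu,cpu=cpu0 usage_idle=91.5 1480000000000000000"]
}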

View File

@@ -2,12 +2,12 @@ package json
 import (
 "fmt"
+"strings"
 "testing"
 "time"
 "github.com/stretchr/testify/assert"
-"github.com/influxdata/telegraf"
 "github.com/influxdata/telegraf/metric"
 )
@@ -23,7 +23,9 @@ func TestSerializeMetricFloat(t *testing.T) {
 assert.NoError(t, err)
 s := JsonSerializer{}
-mS, err := s.Serialize(m)
+var buf []byte
+buf, err = s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{fmt.Sprintf("{\"fields\":{\"usage_idle\":91.5},\"name\":\"cpu\",\"tags\":{\"cpu\":\"cpu0\"},\"timestamp\":%d}", now.Unix())}
 assert.Equal(t, expS, mS)
@@ -41,7 +43,9 @@ func TestSerializeMetricInt(t *testing.T) {
 assert.NoError(t, err)
 s := JsonSerializer{}
-mS, err := s.Serialize(m)
+var buf []byte
+buf, err = s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{fmt.Sprintf("{\"fields\":{\"usage_idle\":90},\"name\":\"cpu\",\"tags\":{\"cpu\":\"cpu0\"},\"timestamp\":%d}", now.Unix())}
@@ -60,7 +64,9 @@ func TestSerializeMetricString(t *testing.T) {
 assert.NoError(t, err)
 s := JsonSerializer{}
-mS, err := s.Serialize(m)
+var buf []byte
+buf, err = s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{fmt.Sprintf("{\"fields\":{\"usage_idle\":\"foobar\"},\"name\":\"cpu\",\"tags\":{\"cpu\":\"cpu0\"},\"timestamp\":%d}", now.Unix())}
@@ -80,7 +86,9 @@ func TestSerializeMultiFields(t *testing.T) {
 assert.NoError(t, err)
 s := JsonSerializer{}
-mS, err := s.Serialize(m)
+var buf []byte
+buf, err = s.Serialize(m)
+mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 assert.NoError(t, err)
 expS := []string{fmt.Sprintf("{\"fields\":{\"usage_idle\":90,\"usage_total\":8559615},\"name\":\"cpu\",\"tags\":{\"cpu\":\"cpu0\"},\"timestamp\":%d}", now.Unix())}