Enable gofmt code simplification (#4887)

parent 4a311830c6
commit ee056278f5

Makefile: 4
@@ -12,7 +12,7 @@ PREFIX := /usr/local
 BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
 COMMIT := $(shell git rev-parse --short HEAD)
 GOFILES ?= $(shell git ls-files '*.go')
-GOFMT ?= $(shell gofmt -l $(filter-out plugins/parsers/influx/machine.go, $(GOFILES)))
+GOFMT ?= $(shell gofmt -l -s $(filter-out plugins/parsers/influx/machine.go, $(GOFILES)))
 BUILDFLAGS ?=
 
 ifdef GOBIN
@@ -55,7 +55,7 @@ test:
 
 .PHONY: fmt
 fmt:
-	@gofmt -w $(filter-out plugins/parsers/influx/machine.go, $(GOFILES))
+	@gofmt -s -w $(filter-out plugins/parsers/influx/machine.go, $(GOFILES))
 
 .PHONY: fmtcheck
 fmtcheck:
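The `-s` flag makes gofmt apply code simplifications in addition to formatting, which is what produces the bulk of the changes below: redundant element types are dropped from composite literals, the unused blank variable is removed from `range` clauses, and redundant slice bounds are trimmed. The stand-alone sketch below (illustrative identifiers only, not taken from the Telegraf sources) shows the simplified form that `gofmt -s` rewrites code into.

```go
package main

import "fmt"

type tagFilter struct {
	Name   string
	Filter []string
}

func main() {
	// Composite literal: the element type is elided inside the slice literal
	// (gofmt -s rewrites []tagFilter{tagFilter{...}} to []tagFilter{{...}}).
	filters := []tagFilter{
		{Name: "cpu", Filter: []string{"cpu-*"}},
		{Name: "mem", Filter: []string{"mem_free"}},
	}

	// Range clause: the unused blank value variable is dropped
	// (for i, _ := range filters becomes for i := range filters).
	for i := range filters {
		fmt.Println(filters[i].Name)
	}

	// Slice expression: the redundant high bound is removed
	// (s[1:len(s)] becomes s[1:]).
	s := []string{"a", "b", "c"}
	fmt.Println(s[1:])
}
```

Running `gofmt -l -s` (as the updated GOFMT variable does) lists files that still deviate from this form, while `gofmt -s -w` rewrites them in place.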
@@ -296,13 +296,13 @@ func main() {
-for k, _ := range outputs.Outputs {
+for k := range outputs.Outputs {
-for k, _ := range inputs.Inputs {
+for k := range inputs.Inputs {
@@ -32,13 +32,13 @@ func TestConfig_LoadSingleInputWithEnvVars(t *testing.T) {
-models.TagFilter{
+{
-models.TagFilter{
+{
@@ -71,13 +71,13 @@ func TestConfig_LoadSingleInput(t *testing.T) {
-models.TagFilter{
+{
-models.TagFilter{
+{
@@ -117,13 +117,13 @@ func TestConfig_LoadDirectory(t *testing.T) {
-models.TagFilter{
+{
-models.TagFilter{
+{
@@ -79,13 +79,13 @@ func (f *Filter) Compile() error {
-for i, _ := range f.TagDrop {
+for i := range f.TagDrop {
-for i, _ := range f.TagPass {
+for i := range f.TagPass {
@@ -24,7 +24,7 @@ func TestFilter_ApplyEmpty(t *testing.T) {
-TagFilter{
+{
@@ -244,11 +244,11 @@ func TestFilter_FieldDrop(t *testing.T) {
-TagFilter{
+{
-TagFilter{
+{
@@ -258,19 +258,19 @@ func TestFilter_TagPass(t *testing.T) {
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu-total"}},
+{{Key: "cpu", Value: "cpu-total"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu-0"}},
+{{Key: "cpu", Value: "cpu-0"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu-1"}},
+{{Key: "cpu", Value: "cpu-1"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu-2"}},
+{{Key: "cpu", Value: "cpu-2"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "mem", Value: "mem_free"}},
+{{Key: "mem", Value: "mem_free"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cputotal"}},
+{{Key: "cpu", Value: "cputotal"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu0"}},
+{{Key: "cpu", Value: "cpu0"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu1"}},
+{{Key: "cpu", Value: "cpu1"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu2"}},
+{{Key: "cpu", Value: "cpu2"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "mem", Value: "mem_used"}},
+{{Key: "mem", Value: "mem_used"}},
@@ -288,11 +288,11 @@ func TestFilter_TagPass(t *testing.T) {
-TagFilter{
+{
-TagFilter{
+{
@@ -302,19 +302,19 @@ func TestFilter_TagDrop(t *testing.T) {
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu-total"}},
+{{Key: "cpu", Value: "cpu-total"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu-0"}},
+{{Key: "cpu", Value: "cpu-0"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu-1"}},
+{{Key: "cpu", Value: "cpu-1"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu-2"}},
+{{Key: "cpu", Value: "cpu-2"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "mem", Value: "mem_free"}},
+{{Key: "mem", Value: "mem_free"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cputotal"}},
+{{Key: "cpu", Value: "cputotal"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu0"}},
+{{Key: "cpu", Value: "cpu0"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu1"}},
+{{Key: "cpu", Value: "cpu1"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "cpu", Value: "cpu2"}},
+{{Key: "cpu", Value: "cpu2"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "mem", Value: "mem_used"}},
+{{Key: "mem", Value: "mem_used"}},
@@ -442,27 +442,27 @@ func TestFilter_FilterFieldPassAndDrop(t *testing.T) {
-[]*telegraf.Tag{&telegraf.Tag{Key: "tag1", Value: "1"}, &telegraf.Tag{Key: "tag2", Value: "3"}},
+{{Key: "tag1", Value: "1"}, {Key: "tag2", Value: "3"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "tag1", Value: "1"}, &telegraf.Tag{Key: "tag2", Value: "2"}},
+{{Key: "tag1", Value: "1"}, {Key: "tag2", Value: "2"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "tag1", Value: "2"}, &telegraf.Tag{Key: "tag2", Value: "1"}},
+{{Key: "tag1", Value: "2"}, {Key: "tag2", Value: "1"}},
-[]*telegraf.Tag{&telegraf.Tag{Key: "tag1", Value: "4"}, &telegraf.Tag{Key: "tag2", Value: "1"}},
+{{Key: "tag1", Value: "4"}, {Key: "tag2", Value: "1"}},
-TagFilter{
+{
-TagFilter{
+{
-TagFilter{
+{
@@ -59,7 +59,7 @@ func prettyToBytes(v string) uint64 {
-prefix := v[len(v)-1 : len(v)]
+prefix := v[len(v)-1:]
@@ -278,7 +278,7 @@ func flatten(data interface{}) []*metric {
-metrics = []*metric{&metric{make([]string, 0, 1), val}}
+metrics = []*metric{{make([]string, 0, 1), val}}
@@ -81,7 +81,7 @@ func TestGather(t *testing.T) {
-return []*socket{&socket{"osd.1", typeOsd, ""}}, nil
+return []*socket{{"osd.1", typeOsd, ""}}, nil
@@ -190,17 +190,17 @@ type SockTest struct {
-&SockTest{
+{
-&SockTest{
+{
-&SockTest{
+{
-&SockTest{},
+{},
@@ -173,7 +173,7 @@ const valuePattern = "[\\d-]+"
-fileFormat{
+{
@@ -185,7 +185,7 @@ var fileFormats = [...]fileFormat{
-fileFormat{
+{
@@ -197,7 +197,7 @@ var fileFormats = [...]fileFormat{
-fileFormat{
+{
@@ -211,7 +211,7 @@ var fileFormats = [...]fileFormat{
-fileFormat{
+{
@@ -18,7 +18,7 @@ func (m *mockGatherCloudWatchClient) ListMetrics(params *cloudwatch.ListMetricsI
-&cloudwatch.Dimension{
+{
@@ -100,7 +100,7 @@ func (m *mockSelectMetricsCloudWatchClient) ListMetrics(params *cloudwatch.ListM
-&cloudwatch.Dimension{
+{
@@ -112,11 +112,11 @@ func (m *mockSelectMetricsCloudWatchClient) ListMetrics(params *cloudwatch.ListM
-&cloudwatch.Dimension{
+{
-&cloudwatch.Dimension{
+{
@@ -148,14 +148,14 @@ func TestSelectMetrics(t *testing.T) {
-&Metric{
+{
-&Dimension{
+{
-&Dimension{
+{
@@ -8,7 +8,7 @@ import (
-&api.HealthCheck{
+{
@@ -163,7 +163,7 @@ func TestCPUCountIncrease(t *testing.T) {
-cpu.TimesStat{
+{
@@ -173,10 +173,10 @@ func TestCPUCountIncrease(t *testing.T) {
-cpu.TimesStat{
+{
-cpu.TimesStat{
+{
@@ -115,8 +115,8 @@ func TestGetSummary(t *testing.T) {
-Slave{ID: "a"},
+{ID: "a"},
-Slave{ID: "b"},
+{ID: "b"},
@@ -385,8 +385,8 @@ func TestGatherFilterNode(t *testing.T) {
-Slave{ID: "x"},
+{ID: "x"},
-Slave{ID: "y"},
+{ID: "y"},
@@ -138,7 +138,7 @@ func TestDiskUsageHostMountPrefix(t *testing.T) {
-&disk.UsageStat{
+{
@@ -170,7 +170,7 @@ func TestDiskUsageHostMountPrefix(t *testing.T) {
-&disk.UsageStat{
+{
@@ -203,7 +203,7 @@ func TestDiskUsageHostMountPrefix(t *testing.T) {
-&disk.UsageStat{
+{
@@ -30,7 +30,7 @@ func TestDiskIO(t *testing.T) {
-"sda": disk.IOCountersStat{
+"sda": {
@@ -46,7 +46,7 @@ func TestDiskIO(t *testing.T) {
-Metric{
+{
@@ -70,11 +70,11 @@ func TestDiskIO(t *testing.T) {
-"sda": disk.IOCountersStat{
+"sda": {
-"vda": disk.IOCountersStat{
+"vda": {
@@ -83,7 +83,7 @@ func TestDiskIO(t *testing.T) {
-Metric{
+{
@@ -678,35 +678,35 @@ func TestContainerStateFilter(t *testing.T) {
-"status": []string{"running"},
+"status": {"running"},
-"status": []string{"running"},
+"status": {"running"},
-"status": []string{"restarting", "running", "removing"},
+"status": {"restarting", "running", "removing"},
-"status": []string{"created", "restarting", "running", "removing", "paused", "exited", "dead"},
+"status": {"created", "restarting", "running", "removing", "paused", "exited", "dead"},
-"status": []string{},
+"status": {},
@@ -714,7 +714,7 @@ func TestContainerStateFilter(t *testing.T) {
-"status": []string{"created", "restarting", "running", "removing", "paused", "dead"},
+"status": {"created", "restarting", "running", "removing", "paused", "dead"},
@@ -60,7 +60,7 @@ var info = types.Info{
-types.Container{
+{
@@ -68,22 +68,22 @@ var containerList = []types.Container{
-types.Port{
+{
-types.Port{
+{
-types.Port{
+{
-types.Port{
+{
@@ -97,7 +97,7 @@ var containerList = []types.Container{
-types.Container{
+{
@@ -105,22 +105,22 @@ var containerList = []types.Container{
-types.Port{
+{
-types.Port{
+{
-types.Port{
+{
-types.Port{
+{
@@ -134,15 +134,15 @@ var containerList = []types.Container{
-types.Container{
+{
-types.Container{
+{
-types.Container{
+{
@@ -150,7 +150,7 @@ var containerList = []types.Container{
-swarm.Service{
+{
@@ -163,7 +163,7 @@ var ServiceList = []swarm.Service{
-swarm.Service{
+{
@@ -177,7 +177,7 @@ var ServiceList = []swarm.Service{
-swarm.Task{
+{
@@ -186,7 +186,7 @@ var TaskList = []swarm.Task{
-swarm.Task{
+{
@@ -195,7 +195,7 @@ var TaskList = []swarm.Task{
-swarm.Task{
+{
@@ -207,13 +207,13 @@ var TaskList = []swarm.Task{
-swarm.Node{
+{
-swarm.Node{
+{
@@ -135,7 +135,7 @@ func (c *mockHTTPClient) HTTPClient() *http.Client {
-&GrayLog{
+{
@@ -14,13 +14,13 @@ type mockFetcher struct {
-hddtemp.Disk{
+{
-hddtemp.Disk{
+{
@@ -163,7 +163,7 @@ func (c *mockHTTPClient) HTTPClient() *http.Client {
-&HttpJson{
+{
@@ -180,7 +180,7 @@ func genMockHttpJson(response string, statusCode int) []*HttpJson {
-&HttpJson{
+{
@@ -60,7 +60,7 @@ scan:
-irqvals := fields[1:len(fields)]
+irqvals := fields[1:]
@@ -19,31 +19,31 @@ NET_RX: 867028 225
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
@@ -88,91 +88,91 @@ func TestParseInterruptsBad(t *testing.T) {
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
-IRQ{
+{
@@ -28,7 +28,7 @@ func NewConnection(server string, privilege string) *Connection {
-connstr = server[inx1+1 : len(server)]
+connstr = server[inx1+1:]
@@ -50,8 +50,8 @@ func TestIpset(t *testing.T) {
-map[string]string{"set": "myset", "rule": "1.2.3.4"},
+{"set": "myset", "rule": "1.2.3.4"},
-map[string]string{"set": "myset", "rule": "3.4.5.6"},
+{"set": "myset", "rule": "3.4.5.6"},
@@ -66,8 +66,8 @@ func TestIpset(t *testing.T) {
-map[string]string{"set": "myset", "rule": "1.2.3.4"},
+{"set": "myset", "rule": "1.2.3.4"},
-map[string]string{"set": "myset", "rule": "3.4.5.6"},
+{"set": "myset", "rule": "3.4.5.6"},
@@ -42,7 +42,7 @@ func TestIptables_Gather(t *testing.T) {
-tags: []map[string]string{map[string]string{"table": "filter", "chain": "INPUT", "ruleid": "foobar"}},
+tags: []map[string]string{{"table": "filter", "chain": "INPUT", "ruleid": "foobar"}},
@@ -98,9 +98,9 @@ func TestIptables_Gather(t *testing.T) {
-map[string]string{"table": "filter", "chain": "INPUT", "ruleid": "foo"},
+{"table": "filter", "chain": "INPUT", "ruleid": "foo"},
-map[string]string{"table": "filter", "chain": "FORWARD", "ruleid": "bar"},
+{"table": "filter", "chain": "FORWARD", "ruleid": "bar"},
-map[string]string{"table": "filter", "chain": "FORWARD", "ruleid": "foobar"},
+{"table": "filter", "chain": "FORWARD", "ruleid": "foobar"},
@@ -118,7 +118,7 @@ func TestIptables_Gather(t *testing.T) {
-map[string]string{"table": "filter", "chain": "INPUT", "ruleid": "foobar"},
+{"table": "filter", "chain": "INPUT", "ruleid": "foobar"},
@@ -134,8 +134,8 @@ func TestIptables_Gather(t *testing.T) {
-map[string]string{"table": "mangle", "chain": "SHAPER", "ruleid": "test"},
+{"table": "mangle", "chain": "SHAPER", "ruleid": "test"},
-map[string]string{"table": "mangle", "chain": "SHAPER", "ruleid": "test2"},
+{"table": "mangle", "chain": "SHAPER", "ruleid": "test2"},
@@ -163,7 +163,7 @@ func TestIptables_Gather(t *testing.T) {
-map[string]string{"table": "all_recv", "chain": "accountfwd", "ruleid": "all_recv"},
+{"table": "all_recv", "chain": "accountfwd", "ruleid": "all_recv"},
@@ -117,7 +117,7 @@ const invalidJSON = "I don't think this is JSON"
-var Servers = []Server{Server{Name: "as1", Host: "127.0.0.1", Port: "8080"}}
+var Servers = []Server{{Name: "as1", Host: "127.0.0.1", Port: "8080"}}
@@ -17,7 +17,7 @@ func TestJolokia2_makeReadRequests(t *testing.T) {
-ReadRequest{
+{
@@ -29,7 +29,7 @@ func TestJolokia2_makeReadRequests(t *testing.T) {
-ReadRequest{
+{
@@ -41,7 +41,7 @@ func TestJolokia2_makeReadRequests(t *testing.T) {
-ReadRequest{
+{
@@ -53,7 +53,7 @@ func TestJolokia2_makeReadRequests(t *testing.T) {
-ReadRequest{
+{
@@ -66,7 +66,7 @@ func TestJolokia2_makeReadRequests(t *testing.T) {
-ReadRequest{
+{
@@ -79,12 +79,12 @@ func TestJolokia2_makeReadRequests(t *testing.T) {
-ReadRequest{
+{
-ReadRequest{
+{
@@ -748,7 +748,7 @@ func setupPlugin(t *testing.T, conf string) telegraf.Input {
-for name, _ := range table.Fields {
+for name := range table.Fields {
@@ -42,8 +42,8 @@ type Mesos struct {
-MASTER: []string{"resources", "master", "system", "agents", "frameworks", "tasks", "messages", "evqueue", "registrar"},
+MASTER: {"resources", "master", "system", "agents", "frameworks", "tasks", "messages", "evqueue", "registrar"},
-SLAVE: []string{"resources", "agent", "system", "executors", "tasks", "messages"},
+SLAVE: {"resources", "agent", "system", "executors", "tasks", "messages"},
@@ -53,7 +53,7 @@ func TestAddNonReplStats(t *testing.T) {
-for key, _ := range DefaultStats {
+for key := range DefaultStats {
@@ -74,7 +74,7 @@ func TestAddReplStats(t *testing.T) {
-for key, _ := range MmapStats {
+for key := range MmapStats {
@@ -106,7 +106,7 @@ func TestAddWiredTigerStats(t *testing.T) {
-for key, _ := range WiredTigerStats {
+for key := range WiredTigerStats {
@@ -127,7 +127,7 @@ func TestAddShardStats(t *testing.T) {
-for key, _ := range DefaultShardStats {
+for key := range DefaultShardStats {
@@ -156,8 +156,8 @@ func TestAddShardHostStats(t *testing.T) {
-for host, _ := range hostStatLines {
+for host := range hostStatLines {
-for key, _ := range ShardHostStats {
+for key := range ShardHostStats {
@@ -35,7 +35,7 @@ func TestAddDefaultStats(t *testing.T) {
-for key, _ := range DefaultStats {
+for key := range DefaultStats {
@@ -31,7 +31,7 @@ func TestNetStats(t *testing.T) {
-net.ProtoCountersStat{
+{
@@ -42,16 +42,16 @@ func TestNetStats(t *testing.T) {
-net.ConnectionStat{
+{
-net.ConnectionStat{
+{
-net.ConnectionStat{
+{
-net.ConnectionStat{
+{
@@ -24,12 +24,12 @@ func TestReadsMetricsFromNSQ(t *testing.T) {
-instruction{0, nsq.FrameTypeResponse, []byte("OK")},
+{0, nsq.FrameTypeResponse, []byte("OK")},
-instruction{0, nsq.FrameTypeResponse, []byte("OK")},
+{0, nsq.FrameTypeResponse, []byte("OK")},
-instruction{20 * time.Millisecond, nsq.FrameTypeMessage, frameMessage(msg)},
+{20 * time.Millisecond, nsq.FrameTypeMessage, frameMessage(msg)},
-instruction{100 * time.Millisecond, -1, []byte("exit")},
+{100 * time.Millisecond, -1, []byte("exit")},
@ -18,19 +18,19 @@ var (
|
||||||
measurement = "nvidia_smi"
|
measurement = "nvidia_smi"
|
||||||
metrics = "fan.speed,memory.total,memory.used,memory.free,pstate,temperature.gpu,name,uuid,compute_mode,utilization.gpu,utilization.memory,index,power.draw"
|
metrics = "fan.speed,memory.total,memory.used,memory.free,pstate,temperature.gpu,name,uuid,compute_mode,utilization.gpu,utilization.memory,index,power.draw"
|
||||||
metricNames = [][]string{
|
metricNames = [][]string{
|
||||||
[]string{"fan_speed", "integer"},
|
{"fan_speed", "integer"},
|
||||||
[]string{"memory_total", "integer"},
|
{"memory_total", "integer"},
|
||||||
[]string{"memory_used", "integer"},
|
{"memory_used", "integer"},
|
||||||
[]string{"memory_free", "integer"},
|
{"memory_free", "integer"},
|
||||||
[]string{"pstate", "tag"},
|
{"pstate", "tag"},
|
||||||
[]string{"temperature_gpu", "integer"},
|
{"temperature_gpu", "integer"},
|
||||||
[]string{"name", "tag"},
|
{"name", "tag"},
|
||||||
[]string{"uuid", "tag"},
|
{"uuid", "tag"},
|
||||||
[]string{"compute_mode", "tag"},
|
{"compute_mode", "tag"},
|
||||||
[]string{"utilization_gpu", "integer"},
|
{"utilization_gpu", "integer"},
|
||||||
[]string{"utilization_memory", "integer"},
|
{"utilization_memory", "integer"},
|
||||||
[]string{"index", "tag"},
|
{"index", "tag"},
|
||||||
[]string{"power_draw", "float"},
|
{"power_draw", "float"},
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
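Most of the remaining hunks elide a redundant element type inside composite literals. A minimal sketch of that rewrite, reusing two values from the table above; the surrounding program is hypothetical:

package main

import "fmt"

func main() {
	// Unsimplified form: the element type is repeated for every element.
	verbose := [][]string{
		[]string{"fan_speed", "integer"},
		[]string{"power_draw", "float"},
	}

	// Simplified form produced by gofmt -s: the element type is elided.
	concise := [][]string{
		{"fan_speed", "integer"},
		{"power_draw", "float"},
	}

	fmt.Println(verbose, concise)
}

The same elision applies to map literals and to slices of pointers, which is why hunks below turn expressions such as &Entry{...} into {...} when the slice is already typed []*Entry.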
@@ -72,11 +72,11 @@ type pfctlOutputStanza struct {
 }

 var pfctlOutputStanzas = []*pfctlOutputStanza{
-&pfctlOutputStanza{
+{
 HeaderRE: regexp.MustCompile("^State Table"),
 ParseFunc: parseStateTable,
 },
-&pfctlOutputStanza{
+{
 HeaderRE: regexp.MustCompile("^Counters"),
 ParseFunc: parseCounterTable,
 },
@@ -127,10 +127,10 @@ type Entry struct {
 }

 var StateTable = []*Entry{
-&Entry{"entries", "current entries", -1},
+{"entries", "current entries", -1},
-&Entry{"searches", "searches", -1},
+{"searches", "searches", -1},
-&Entry{"inserts", "inserts", -1},
+{"inserts", "inserts", -1},
-&Entry{"removals", "removals", -1},
+{"removals", "removals", -1},
 }

 var stateTableRE = regexp.MustCompile(`^ (.*?)\s+(\d+)`)
@@ -140,21 +140,21 @@ func parseStateTable(lines []string, fields map[string]interface{}) error {
 }

 var CounterTable = []*Entry{
-&Entry{"match", "match", -1},
+{"match", "match", -1},
-&Entry{"bad-offset", "bad-offset", -1},
+{"bad-offset", "bad-offset", -1},
-&Entry{"fragment", "fragment", -1},
+{"fragment", "fragment", -1},
-&Entry{"short", "short", -1},
+{"short", "short", -1},
-&Entry{"normalize", "normalize", -1},
+{"normalize", "normalize", -1},
-&Entry{"memory", "memory", -1},
+{"memory", "memory", -1},
-&Entry{"bad-timestamp", "bad-timestamp", -1},
+{"bad-timestamp", "bad-timestamp", -1},
-&Entry{"congestion", "congestion", -1},
+{"congestion", "congestion", -1},
-&Entry{"ip-option", "ip-option", -1},
+{"ip-option", "ip-option", -1},
-&Entry{"proto-cksum", "proto-cksum", -1},
+{"proto-cksum", "proto-cksum", -1},
-&Entry{"state-mismatch", "state-mismatch", -1},
+{"state-mismatch", "state-mismatch", -1},
-&Entry{"state-insert", "state-insert", -1},
+{"state-insert", "state-insert", -1},
-&Entry{"state-limit", "state-limit", -1},
+{"state-limit", "state-limit", -1},
-&Entry{"src-limit", "src-limit", -1},
+{"src-limit", "src-limit", -1},
-&Entry{"synproxy", "synproxy", -1},
+{"synproxy", "synproxy", -1},
 }

 var counterTableRE = regexp.MustCompile(`^ (.*?)\s+(\d+)`)

@@ -23,13 +23,13 @@ func TestPfctlInvocation(t *testing.T) {

 var testCases = []pfctlInvocationTestCase{
 // 0: no sudo
-pfctlInvocationTestCase{
+{
 config: PF{UseSudo: false},
 cmd: "fakepfctl",
 args: []string{"-s", "info"},
 },
 // 1: with sudo
-pfctlInvocationTestCase{
+{
 config: PF{UseSudo: true},
 cmd: "fakesudo",
 args: []string{"fakepfctl", "-s", "info"},
@@ -60,9 +60,9 @@ func TestPfMeasurements(t *testing.T) {

 testCases := []pfTestCase{
 // 0: nil input should raise an error
-pfTestCase{TestInput: "", err: errParseHeader},
+{TestInput: "", err: errParseHeader},
 // 1: changes to pfctl output should raise an error
-pfTestCase{TestInput: `Status: Enabled for 161 days 21:24:45 Debug: Urgent
+{TestInput: `Status: Enabled for 161 days 21:24:45 Debug: Urgent

 Interface Stats for re1 IPv4 IPv6
 Bytes In 2585823744614 1059233657221
@@ -99,7 +99,7 @@ Counters
 err: errMissingData("current entries"),
 },
 // 2: bad numbers should raise an error
-pfTestCase{TestInput: `Status: Enabled for 0 days 00:26:05 Debug: Urgent
+{TestInput: `Status: Enabled for 0 days 00:26:05 Debug: Urgent

 State Table Total Rate
 current entries -23
@@ -125,7 +125,7 @@ Counters
 `,
 err: errMissingData("current entries"),
 },
-pfTestCase{TestInput: `Status: Enabled for 0 days 00:26:05 Debug: Urgent
+{TestInput: `Status: Enabled for 0 days 00:26:05 Debug: Urgent

 State Table Total Rate
 current entries 2
@@ -150,7 +150,7 @@ Counters
 synproxy 0 0.0/s
 `,
 measurements: []measurementResult{
-measurementResult{
+{
 fields: map[string]interface{}{
 "entries": int64(2),
 "searches": int64(11325),
@@ -175,7 +175,7 @@ Counters
 },
 },
 },
-pfTestCase{TestInput: `Status: Enabled for 161 days 21:24:45 Debug: Urgent
+{TestInput: `Status: Enabled for 161 days 21:24:45 Debug: Urgent

 Interface Stats for re1 IPv4 IPv6
 Bytes In 2585823744614 1059233657221
@@ -210,7 +210,7 @@ Counters
 synproxy 0 0.0/s
 `,
 measurements: []measurementResult{
-measurementResult{
+{
 fields: map[string]interface{}{
 "entries": int64(649),
 "searches": int64(18421725761),
@@ -61,26 +61,26 @@ func init() {

 // BEGIN GO GENERATE CONTENT
 var mockedCommandResults = map[string]mockedCommandResult{
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.0": mockedCommandResult{stdout: "TEST::testTable\ntestTable OBJECT-TYPE\n -- FROM\tTEST\n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 0 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.0": {stdout: "TEST::testTable\ntestTable OBJECT-TYPE\n -- FROM\tTEST\n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 0 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.1.1": mockedCommandResult{stdout: "TEST::hostname\nhostname OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 1 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.1.1": {stdout: "TEST::hostname\nhostname OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 1 1 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.1.2": mockedCommandResult{stdout: "TEST::1.2\nanonymous#1 OBJECT-TYPE\n -- FROM\tTEST\n::= { iso(1) 0 testOID(0) 1 2 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.1.2": {stdout: "TEST::1.2\nanonymous#1 OBJECT-TYPE\n -- FROM\tTEST\n::= { iso(1) 0 testOID(0) 1 2 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x001.0.0.1.1": mockedCommandResult{stdout: "TEST::hostname\nhostname OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 1 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x001.0.0.1.1": {stdout: "TEST::hostname\nhostname OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 1 1 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.0.1.1": mockedCommandResult{stdout: "TEST::server\nserver OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.0.1.1": {stdout: "TEST::server\nserver OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 1 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.0.1.1.0": mockedCommandResult{stdout: "TEST::server.0\nserver OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) server(1) 0 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.0.1.1.0": {stdout: "TEST::server.0\nserver OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) server(1) 0 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.0.1.5": mockedCommandResult{stdout: "TEST::testTableEntry.5\ntestTableEntry OBJECT-TYPE\n -- FROM\tTEST\n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n INDEX\t\t{ server }\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 5 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.0.0.0.1.5": {stdout: "TEST::testTableEntry.5\ntestTableEntry OBJECT-TYPE\n -- FROM\tTEST\n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n INDEX\t\t{ server }\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 5 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.2.3": mockedCommandResult{stdout: "iso.2.3\niso OBJECT-TYPE\n -- FROM\t#-1\n::= { iso(1) 2 3 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.1.2.3": {stdout: "iso.2.3\niso OBJECT-TYPE\n -- FROM\t#-1\n::= { iso(1) 2 3 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00.iso.2.3": mockedCommandResult{stdout: "iso.2.3\niso OBJECT-TYPE\n -- FROM\t#-1\n::= { iso(1) 2 3 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00.iso.2.3": {stdout: "iso.2.3\niso OBJECT-TYPE\n -- FROM\t#-1\n::= { iso(1) 2 3 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.999": mockedCommandResult{stdout: ".999\n [TRUNCATED]\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00-m\x00all\x00.999": {stdout: ".999\n [TRUNCATED]\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00TEST::server": mockedCommandResult{stdout: "TEST::server\nserver OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00TEST::server": {stdout: "TEST::server\nserver OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 1 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00TEST::server.0": mockedCommandResult{stdout: "TEST::server.0\nserver OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) server(1) 0 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00TEST::server.0": {stdout: "TEST::server.0\nserver OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) server(1) 0 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00TEST::testTable": mockedCommandResult{stdout: "TEST::testTable\ntestTable OBJECT-TYPE\n -- FROM\tTEST\n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 0 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00TEST::testTable": {stdout: "TEST::testTable\ntestTable OBJECT-TYPE\n -- FROM\tTEST\n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 0 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00TEST::connections": mockedCommandResult{stdout: "TEST::connections\nconnections OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tINTEGER\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 2 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00TEST::connections": {stdout: "TEST::connections\nconnections OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tINTEGER\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 2 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00TEST::latency": mockedCommandResult{stdout: "TEST::latency\nlatency OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 3 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00TEST::latency": {stdout: "TEST::latency\nlatency OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 3 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00TEST::description": mockedCommandResult{stdout: "TEST::description\ndescription OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 4 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00TEST::description": {stdout: "TEST::description\ndescription OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) testTable(0) testTableEntry(1) 4 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00TEST::hostname": mockedCommandResult{stdout: "TEST::hostname\nhostname OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 1 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00TEST::hostname": {stdout: "TEST::hostname\nhostname OBJECT-TYPE\n -- FROM\tTEST\n SYNTAX\tOCTET STRING\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n::= { iso(1) 0 testOID(0) 1 1 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00IF-MIB::ifPhysAddress.1": mockedCommandResult{stdout: "IF-MIB::ifPhysAddress.1\nifPhysAddress OBJECT-TYPE\n -- FROM\tIF-MIB\n -- TEXTUAL CONVENTION PhysAddress\n SYNTAX\tOCTET STRING\n DISPLAY-HINT\t\"1x:\"\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n DESCRIPTION\t\"The interface's address at its protocol sub-layer. For\n example, for an 802.x interface, this object normally\n contains a MAC address. The interface's media-specific MIB\n must define the bit and byte ordering and the format of the\n value of this object. For interfaces which do not have such\n an address (e.g., a serial line), this object should contain\n an octet string of zero length.\"\n::= { iso(1) org(3) dod(6) internet(1) mgmt(2) mib-2(1) interfaces(2) ifTable(2) ifEntry(1) ifPhysAddress(6) 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00IF-MIB::ifPhysAddress.1": {stdout: "IF-MIB::ifPhysAddress.1\nifPhysAddress OBJECT-TYPE\n -- FROM\tIF-MIB\n -- TEXTUAL CONVENTION PhysAddress\n SYNTAX\tOCTET STRING\n DISPLAY-HINT\t\"1x:\"\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n DESCRIPTION\t\"The interface's address at its protocol sub-layer. For\n example, for an 802.x interface, this object normally\n contains a MAC address. The interface's media-specific MIB\n must define the bit and byte ordering and the format of the\n value of this object. For interfaces which do not have such\n an address (e.g., a serial line), this object should contain\n an octet string of zero length.\"\n::= { iso(1) org(3) dod(6) internet(1) mgmt(2) mib-2(1) interfaces(2) ifTable(2) ifEntry(1) ifPhysAddress(6) 1 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00BRIDGE-MIB::dot1dTpFdbAddress.1": mockedCommandResult{stdout: "BRIDGE-MIB::dot1dTpFdbAddress.1\ndot1dTpFdbAddress OBJECT-TYPE\n -- FROM\tBRIDGE-MIB\n -- TEXTUAL CONVENTION MacAddress\n SYNTAX\tOCTET STRING (6) \n DISPLAY-HINT\t\"1x:\"\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n DESCRIPTION\t\"A unicast MAC address for which the bridge has\n forwarding and/or filtering information.\"\n::= { iso(1) org(3) dod(6) internet(1) mgmt(2) mib-2(1) dot1dBridge(17) dot1dTp(4) dot1dTpFdbTable(3) dot1dTpFdbEntry(1) dot1dTpFdbAddress(1) 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00BRIDGE-MIB::dot1dTpFdbAddress.1": {stdout: "BRIDGE-MIB::dot1dTpFdbAddress.1\ndot1dTpFdbAddress OBJECT-TYPE\n -- FROM\tBRIDGE-MIB\n -- TEXTUAL CONVENTION MacAddress\n SYNTAX\tOCTET STRING (6) \n DISPLAY-HINT\t\"1x:\"\n MAX-ACCESS\tread-only\n STATUS\tcurrent\n DESCRIPTION\t\"A unicast MAC address for which the bridge has\n forwarding and/or filtering information.\"\n::= { iso(1) org(3) dod(6) internet(1) mgmt(2) mib-2(1) dot1dBridge(17) dot1dTp(4) dot1dTpFdbTable(3) dot1dTpFdbEntry(1) dot1dTpFdbAddress(1) 1 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00-Ob\x00TCP-MIB::tcpConnectionLocalAddress.1": mockedCommandResult{stdout: "TCP-MIB::tcpConnectionLocalAddress.1\ntcpConnectionLocalAddress OBJECT-TYPE\n -- FROM\tTCP-MIB\n -- TEXTUAL CONVENTION InetAddress\n SYNTAX\tOCTET STRING (0..255) \n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n DESCRIPTION\t\"The local IP address for this TCP connection. The type\n of this address is determined by the value of\n tcpConnectionLocalAddressType.\n\n As this object is used in the index for the\n tcpConnectionTable, implementors should be\n careful not to create entries that would result in OIDs\n with more than 128 subidentifiers; otherwise the information\n cannot be accessed by using SNMPv1, SNMPv2c, or SNMPv3.\"\n::= { iso(1) org(3) dod(6) internet(1) mgmt(2) mib-2(1) tcp(6) tcpConnectionTable(19) tcpConnectionEntry(1) tcpConnectionLocalAddress(2) 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00-Ob\x00TCP-MIB::tcpConnectionLocalAddress.1": {stdout: "TCP-MIB::tcpConnectionLocalAddress.1\ntcpConnectionLocalAddress OBJECT-TYPE\n -- FROM\tTCP-MIB\n -- TEXTUAL CONVENTION InetAddress\n SYNTAX\tOCTET STRING (0..255) \n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n DESCRIPTION\t\"The local IP address for this TCP connection. The type\n of this address is determined by the value of\n tcpConnectionLocalAddressType.\n\n As this object is used in the index for the\n tcpConnectionTable, implementors should be\n careful not to create entries that would result in OIDs\n with more than 128 subidentifiers; otherwise the information\n cannot be accessed by using SNMPv1, SNMPv2c, or SNMPv3.\"\n::= { iso(1) org(3) dod(6) internet(1) mgmt(2) mib-2(1) tcp(6) tcpConnectionTable(19) tcpConnectionEntry(1) tcpConnectionLocalAddress(2) 1 }\n", stderr: "", exitError: false},
-"snmptranslate\x00-Td\x00TEST::testTable.1": mockedCommandResult{stdout: "TEST::testTableEntry\ntestTableEntry OBJECT-TYPE\n -- FROM\tTEST\n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n INDEX\t\t{ server }\n::= { iso(1) 0 testOID(0) testTable(0) 1 }\n", stderr: "", exitError: false},
+"snmptranslate\x00-Td\x00TEST::testTable.1": {stdout: "TEST::testTableEntry\ntestTableEntry OBJECT-TYPE\n -- FROM\tTEST\n MAX-ACCESS\tnot-accessible\n STATUS\tcurrent\n INDEX\t\t{ server }\n::= { iso(1) 0 testOID(0) testTable(0) 1 }\n", stderr: "", exitError: false},
-"snmptable\x00-Ch\x00-Cl\x00-c\x00public\x00127.0.0.1\x00TEST::testTable": mockedCommandResult{stdout: "server connections latency description \nTEST::testTable: No entries\n", stderr: "", exitError: false},
+"snmptable\x00-Ch\x00-Cl\x00-c\x00public\x00127.0.0.1\x00TEST::testTable": {stdout: "server connections latency description \nTEST::testTable: No entries\n", stderr: "", exitError: false},
 }

@@ -721,7 +721,7 @@ func TestSnmpTranslateCache_miss(t *testing.T) {

 func TestSnmpTranslateCache_hit(t *testing.T) {
 snmpTranslateCaches = map[string]snmpTranslateCache{
-"foo": snmpTranslateCache{
+"foo": {
 mibName: "a",
 oidNum: "b",
 oidText: "c",
@@ -754,7 +754,7 @@ func TestSnmpTableCache_miss(t *testing.T) {

 func TestSnmpTableCache_hit(t *testing.T) {
 snmpTableCaches = map[string]snmpTableCache{
-"foo": snmpTableCache{
+"foo": {
 mibName: "a",
 oidNum: "b",
 oidText: "c",

@@ -874,21 +874,21 @@ func TestParse_DataDogTags(t *testing.T) {
 }

 testTags := map[string]map[string]string{
-"my_counter": map[string]string{
+"my_counter": {
 "host": "localhost",
 "environment": "prod",
 "endpoint": "/:tenant?/oauth/ro",
 },
-"my_gauge": map[string]string{
+"my_gauge": {
 "live": "",
 },
-"my_set": map[string]string{
+"my_set": {
 "host": "localhost",
 },
-"my_timer": map[string]string{
+"my_timer": {
 "live": "",
 "host": "localhost",
 },
@@ -34,7 +34,7 @@ func getTestCasesForRFC5425() []testCase5425 {
 name: "1st/avg/ok",
 data: []byte(`188 <29>1 2016-02-21T04:32:57+00:00 web1 someservice 2341 2 [origin][meta sequence="14125553" service="someservice"] "GET /v1/ok HTTP/1.1" 200 145 "-" "hacheck 0.9.0" 24306 127.0.0.1:40124 575`),
 wantStrict: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(1),
@@ -58,7 +58,7 @@ func getTestCasesForRFC5425() []testCase5425 {
 },
 },
 wantBestEffort: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(1),
@@ -86,7 +86,7 @@
 name: "1st/min/ok//2nd/min/ok",
 data: []byte("16 <1>2 - - - - - -17 <4>11 - - - - - -"),
 wantStrict: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(2),
@@ -99,7 +99,7 @@
 },
 Time: defaultTime,
 },
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(11),
@@ -114,7 +114,7 @@
 },
 },
 wantBestEffort: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(2),
@@ -127,7 +127,7 @@
 },
 Time: defaultTime,
 },
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(11),
@@ -146,7 +146,7 @@
 name: "1st/utf8/ok",
 data: []byte("23 <1>1 - - - - - - hellø"),
 wantStrict: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(1),
@@ -162,7 +162,7 @@
 },
 },
 wantBestEffort: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(1),
@@ -182,7 +182,7 @@
 name: "1st/nl/ok", // newline
 data: []byte("28 <1>3 - - - - - - hello\nworld"),
 wantStrict: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(3),
@@ -198,7 +198,7 @@
 },
 },
 wantBestEffort: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(3),
@@ -219,7 +219,7 @@
 data: []byte("16 <1>2"),
 wantStrict: nil,
 wantBestEffort: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(2),
@@ -239,7 +239,7 @@
 name: "1st/min/ok",
 data: []byte("16 <1>1 - - - - - -"),
 wantStrict: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(1),
@@ -254,7 +254,7 @@
 },
 },
 wantBestEffort: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(1),
@@ -274,7 +274,7 @@
 data: []byte("16 <1>217 <11>1 - - - - - -"),
 wantStrict: nil,
 wantBestEffort: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": uint16(217),
@@ -299,7 +299,7 @@
 name: "1st/max/ok",
 data: []byte(fmt.Sprintf("8192 <%d>%d %s %s %s %s %s - %s", maxP, maxV, maxTS, maxH, maxA, maxPID, maxMID, message7681)),
 wantStrict: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": maxV,
@@ -320,7 +320,7 @@
 },
 },
 wantBestEffort: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "syslog",
 Fields: map[string]interface{}{
 "version": maxV,
@@ -111,7 +111,7 @@ func (t *Tail) tailNewFiles(fromBeginning bool) error {
 if err != nil {
 t.acc.AddError(fmt.Errorf("E! Error Glob %s failed to compile, %s", filepath, err))
 }
-for file, _ := range g.Match() {
+for file := range g.Match() {
 if _, ok := t.tailers[file]; ok {
 // we're already tailing this file
 continue
@@ -113,16 +113,16 @@ MEMPOOL.vbc.sz_wanted 88 . Size requested
 `

 var parsedSmOutput = map[string]map[string]interface{}{
-"MAIN": map[string]interface{}{
+"MAIN": {
 "uptime": uint64(895),
 "cache_hit": uint64(95),
 "cache_miss": uint64(5),
 },
-"MGT": map[string]interface{}{
+"MGT": {
 "uptime": uint64(896),
 "child_start": uint64(1),
 },
-"MEMPOOL": map[string]interface{}{
+"MEMPOOL": {
 "vbc.live": uint64(0),
 "vbc.pool": uint64(10),
 "vbc.sz_wanted": uint64(88),
@@ -382,7 +382,7 @@ func TestNewBinaryAnnotations(t *testing.T) {
 name: "myservice",
 },
 want: []trace.BinaryAnnotation{
-trace.BinaryAnnotation{
+{
 Host: "myhost",
 ServiceName: "myservice",
 Key: "mykey",
@@ -424,7 +424,7 @@ func TestNewAnnotations(t *testing.T) {
 name: "myservice",
 },
 want: []trace.Annotation{
-trace.Annotation{
+{
 Host: "myhost",
 ServiceName: "myservice",
 Timestamp: time.Unix(0, 0).UTC(),
@@ -113,7 +113,7 @@ func TestUnmarshalThrift(t *testing.T) {
 Duration: addr(53106),
 Annotations: []*zipkincore.Annotation{},
 BinaryAnnotations: []*zipkincore.BinaryAnnotation{
-&zipkincore.BinaryAnnotation{
+{
 Key: "lc",
 AnnotationType: zipkincore.AnnotationType_STRING,
 Value: []byte("trivial"),
@@ -133,7 +133,7 @@ func TestUnmarshalThrift(t *testing.T) {
 Duration: addr(50410),
 Annotations: []*zipkincore.Annotation{},
 BinaryAnnotations: []*zipkincore.BinaryAnnotation{
-&zipkincore.BinaryAnnotation{
+{
 Key: "lc",
 AnnotationType: zipkincore.AnnotationType_STRING,
 Value: []byte("trivial"),
@@ -151,7 +151,7 @@
 Timestamp: addr(1498688360851318),
 Duration: addr(103680),
 Annotations: []*zipkincore.Annotation{
-&zipkincore.Annotation{
+{
 Timestamp: 1498688360851325,
 Value: "Starting child #0",
 Host: &zipkincore.Endpoint{
@@ -159,7 +159,7 @@
 ServiceName: "trivial",
 },
 },
-&zipkincore.Annotation{
+{
 Timestamp: 1498688360904545,
 Value: "Starting child #1",
 Host: &zipkincore.Endpoint{
@@ -167,7 +167,7 @@
 ServiceName: "trivial",
 },
 },
-&zipkincore.Annotation{
+{
 Timestamp: 1498688360954992,
 Value: "A Log",
 Host: &zipkincore.Endpoint{
@@ -177,7 +177,7 @@
 },
 },
 BinaryAnnotations: []*zipkincore.BinaryAnnotation{
-&zipkincore.BinaryAnnotation{
+{
 Key: "lc",
 AnnotationType: zipkincore.AnnotationType_STRING,
 Value: []byte("trivial"),
@@ -108,7 +108,7 @@ func TestLineProtocolConverter_Record(t *testing.T) {
 },
 },
 want: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "8090652509916334619",
@@ -122,7 +122,7 @@
 },
 Time: time.Unix(0, 1498688360851331000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "8090652509916334619",
@@ -139,7 +139,7 @@
 },
 Time: time.Unix(0, 1498688360851331000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "103618986556047333",
@@ -153,7 +153,7 @@
 },
 Time: time.Unix(0, 1498688360904552000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "103618986556047333",
@@ -170,7 +170,7 @@
 },
 Time: time.Unix(0, 1498688360904552000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "22964302721410078",
@@ -184,7 +184,7 @@
 },
 Time: time.Unix(0, 1498688360851318000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "service_name": "trivial",
@@ -200,7 +200,7 @@
 },
 Time: time.Unix(0, 1498688360851318000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "service_name": "trivial",
@@ -216,7 +216,7 @@
 },
 Time: time.Unix(0, 1498688360851318000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "parent_id": "22964302721410078",
@@ -232,7 +232,7 @@
 },
 Time: time.Unix(0, 1498688360851318000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "trace_id": "2505404965370368069",
@@ -283,7 +283,7 @@
 },
 },
 want: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "6802735349851856000",
@@ -297,7 +297,7 @@
 },
 Time: time.Unix(1, 0).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "annotation": "cs",
@@ -27,7 +27,7 @@ func TestZipkinPlugin(t *testing.T) {
 datafile: "testdata/threespans.dat",
 contentType: "application/x-thrift",
 want: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "7047c59776af8a1b",
@@ -41,7 +41,7 @@
 },
 Time: time.Unix(0, 1498688360851331000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "7047c59776af8a1b",
@@ -58,7 +58,7 @@
 },
 Time: time.Unix(0, 1498688360851331000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "17020eb55a8bfe5",
@@ -72,7 +72,7 @@
 },
 Time: time.Unix(0, 1498688360904552000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "17020eb55a8bfe5",
@@ -89,7 +89,7 @@
 },
 Time: time.Unix(0, 1498688360904552000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "5195e96239641e",
@@ -103,7 +103,7 @@
 },
 Time: time.Unix(0, 1498688360851318000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "service_name": "trivial",
@@ -119,7 +119,7 @@
 },
 Time: time.Unix(0, 1498688360851318000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "service_name": "trivial",
@@ -135,7 +135,7 @@
 },
 Time: time.Unix(0, 1498688360851318000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "parent_id": "5195e96239641e",
@@ -151,7 +151,7 @@
 },
 Time: time.Unix(0, 1498688360851318000).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "trace_id": "22c4fc8ab3669045",
@@ -176,7 +176,7 @@
 datafile: "testdata/distributed_trace_sample.dat",
 contentType: "application/x-thrift",
 want: []testutil.Metric{
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "id": "5e682bc21ce99c80",
@@ -190,7 +190,7 @@
 },
 Time: time.Unix(0, 1433330263415871*int64(time.Microsecond)).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "annotation": "cs",
@@ -206,7 +206,7 @@
 },
 Time: time.Unix(0, 1433330263415871*int64(time.Microsecond)).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "annotation": "cr",
@@ -486,7 +486,7 @@
 },
 Time: time.Unix(0, 1503031538778000*int64(time.Microsecond)).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "annotation": "ss",
@@ -502,7 +502,7 @@
 },
 Time: time.Unix(0, 1503031538778000*int64(time.Microsecond)).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "annotation": "Demo2Application",
@@ -519,7 +519,7 @@
 },
 Time: time.Unix(0, 1503031538778000*int64(time.Microsecond)).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "annotation": "hi",
@@ -536,7 +536,7 @@
 },
 Time: time.Unix(0, 1503031538778000*int64(time.Microsecond)).UTC(),
 },
-testutil.Metric{
+{
 Measurement: "zipkin",
 Tags: map[string]string{
 "annotation": "192.168.0.8:test:8010",
@@ -402,7 +402,7 @@ func translate(m telegraf.Metric, prefix string) (*azureMonitorMetric, error) {
 Namespace: ns,
 DimensionNames: dimensionNames,
 Series: []*azureMonitorSeries{
-&azureMonitorSeries{
+{
 DimensionValues: dimensionValues,
 Min: min,
 Max: max,
@@ -29,7 +29,7 @@ func TestBuildDimensions(t *testing.T) {

 tagKeys := make([]string, len(testPoint.Tags()))
 i := 0
-for k, _ := range testPoint.Tags() {
+for k := range testPoint.Tags() {
 tagKeys[i] = k
 i += 1
 }
@@ -165,7 +165,7 @@ func escapeObject(m map[string]interface{}) (string, error) {
 // We find all keys and sort them first because iterating a map in go is
 // randomized and we need consistent output for our unit tests.
 keys := make([]string, 0, len(m))
-for k, _ := range m {
+for k := range m {
 keys = append(keys, k)
 }
 sort.Strings(keys)
@@ -85,11 +85,11 @@ func TestBuildTags(t *testing.T) {
 }{
 {
 []*telegraf.Tag{
-&telegraf.Tag{
+{
 Key: "one",
 Value: "two",
 },
-&telegraf.Tag{
+{
 Key: "three",
 Value: "four",
 },
@@ -98,7 +98,7 @@ func TestBuildTags(t *testing.T) {
 },
 {
 []*telegraf.Tag{
-&telegraf.Tag{
+{
 Key: "aaa",
 Value: "bbb",
 },
@@ -244,7 +244,7 @@ func TestUDP_WriteWithRealConn(t *testing.T) {
 go func() {
 defer wg.Done()
 var total int
-for _, _ = range metrics {
+for range metrics {
 n, _, err := conn.ReadFrom(buf[total:])
 if err != nil {
 break
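The hunk above removes both unused loop variables rather than just one. A minimal sketch of that form of the simplification; the slice contents are hypothetical:

package main

import "fmt"

func main() {
	metrics := []string{"a", "b", "c"} // hypothetical values

	count := 0
	// Unsimplified form: both range variables are assigned to the blank identifier.
	for _, _ = range metrics {
		count++
	}

	// Simplified form produced by gofmt -s: no loop variables at all.
	for range metrics {
		count++
	}

	fmt.Println(count) // prints 6 (both loops run three times)
}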
@@ -154,7 +154,7 @@ func (p *PrometheusClient) Start() error {
 }

 registry := prometheus.NewRegistry()
-for collector, _ := range defaultCollectors {
+for collector := range defaultCollectors {
 switch collector {
 case "gocollector":
 registry.Register(prometheus.NewGoCollector())
@@ -236,7 +236,7 @@ func (p *PrometheusClient) Expire() {
 for name, family := range p.fam {
 for key, sample := range family.Samples {
 if p.ExpirationInterval.Duration != 0 && now.After(sample.Expiration) {
-for k, _ := range sample.Labels {
+for k := range sample.Labels {
 family.LabelSet[k]--
 }
 delete(family.Samples, key)
@@ -323,7 +323,7 @@ func CreateSampleID(tags map[string]string) SampleID {

 func addSample(fam *MetricFamily, sample *Sample, sampleID SampleID) {

-for k, _ := range sample.Labels {
+for k := range sample.Labels {
 fam.LabelSet[k]++
 }
@@ -33,7 +33,7 @@ type testCase struct {

 var singleMetric = testCase{
 []api.ValueList{
-api.ValueList{
+{
 Identifier: api.Identifier{
 Host: "xyzzy",
 Plugin: "cpu",
@@ -48,7 +48,7 @@ var singleMetric = testCase{
 },
 },
 []metricData{
-metricData{
+{
 "cpu_value",
 map[string]string{
 "type_instance": "user",
@@ -65,7 +65,7 @@ var singleMetric = testCase{

 var multiMetric = testCase{
 []api.ValueList{
-api.ValueList{
+{
 Identifier: api.Identifier{
 Host: "xyzzy",
 Plugin: "cpu",
@@ -81,7 +81,7 @@ var multiMetric = testCase{
 },
 },
 []metricData{
-metricData{
+{
 "cpu_0",
 map[string]string{
 "type_instance": "user",
@@ -93,7 +93,7 @@ var multiMetric = testCase{
 "value": float64(42),
 },
 },
-metricData{
+{
 "cpu_1",
 map[string]string{
 "type_instance": "user",

File diff suppressed because it is too large
@@ -53,7 +53,7 @@ func TestFieldConversions(t *testing.T) {
name: "Should change existing field to lowercase",
plugin: &Strings{
Lowercase: []converter{
-converter{
+{
Field: "request",
},
},

@@ -68,7 +68,7 @@ func TestFieldConversions(t *testing.T) {
name: "Should change existing field to uppercase",
plugin: &Strings{
Uppercase: []converter{
-converter{
+{
Field: "request",
},
},

@@ -83,7 +83,7 @@ func TestFieldConversions(t *testing.T) {
name: "Should add new lowercase field",
plugin: &Strings{
Lowercase: []converter{
-converter{
+{
Field: "request",
Dest: "lowercase_request",
},

@@ -103,7 +103,7 @@ func TestFieldConversions(t *testing.T) {
name: "Should trim from both sides",
plugin: &Strings{
Trim: []converter{
-converter{
+{
Field: "request",
Cutset: "/w",
},

@@ -119,13 +119,13 @@ func TestFieldConversions(t *testing.T) {
name: "Should trim from both sides and make lowercase",
plugin: &Strings{
Trim: []converter{
-converter{
+{
Field: "request",
Cutset: "/w",
},
},
Lowercase: []converter{
-converter{
+{
Field: "request",
},
},

@@ -140,7 +140,7 @@ func TestFieldConversions(t *testing.T) {
name: "Should trim from left side",
plugin: &Strings{
TrimLeft: []converter{
-converter{
+{
Field: "request",
Cutset: "/w",
},

@@ -156,7 +156,7 @@ func TestFieldConversions(t *testing.T) {
name: "Should trim from right side",
plugin: &Strings{
TrimRight: []converter{
-converter{
+{
Field: "request",
Cutset: "/w",
},

@@ -172,7 +172,7 @@ func TestFieldConversions(t *testing.T) {
name: "Should trim prefix '/mixed'",
plugin: &Strings{
TrimPrefix: []converter{
-converter{
+{
Field: "request",
Prefix: "/mixed",
},

@@ -188,7 +188,7 @@ func TestFieldConversions(t *testing.T) {
name: "Should trim suffix '-1D&to=now'",
plugin: &Strings{
TrimSuffix: []converter{
-converter{
+{
Field: "request",
Suffix: "-1D&to=now",
},

@@ -204,7 +204,7 @@ func TestFieldConversions(t *testing.T) {
name: "Trim without cutset removes whitespace",
plugin: &Strings{
Trim: []converter{
-converter{
+{
Field: "whitespace",
},
},

@@ -219,7 +219,7 @@ func TestFieldConversions(t *testing.T) {
name: "Trim left without cutset removes whitespace",
plugin: &Strings{
TrimLeft: []converter{
-converter{
+{
Field: "whitespace",
},
},

@@ -234,7 +234,7 @@ func TestFieldConversions(t *testing.T) {
name: "Trim right without cutset removes whitespace",
plugin: &Strings{
TrimRight: []converter{
-converter{
+{
Field: "whitespace",
},
},

@@ -249,7 +249,7 @@ func TestFieldConversions(t *testing.T) {
name: "No change if field missing",
plugin: &Strings{
Lowercase: []converter{
-converter{
+{
Field: "xyzzy",
Suffix: "-1D&to=now",
},

@@ -281,7 +281,7 @@ func TestTagConversions(t *testing.T) {
name: "Should change existing tag to lowercase",
plugin: &Strings{
Lowercase: []converter{
-converter{
+{
Tag: "s-computername",
},
},

@@ -300,7 +300,7 @@ func TestTagConversions(t *testing.T) {
name: "Should add new lowercase tag",
plugin: &Strings{
Lowercase: []converter{
-converter{
+{
Tag: "s-computername",
Dest: "s-computername_lowercase",
},

@@ -324,7 +324,7 @@ func TestTagConversions(t *testing.T) {
name: "Should add new uppercase tag",
plugin: &Strings{
Uppercase: []converter{
-converter{
+{
Tag: "s-computername",
Dest: "s-computername_uppercase",
},

@@ -365,7 +365,7 @@ func TestMeasurementConversions(t *testing.T) {
name: "lowercase measurement",
plugin: &Strings{
Lowercase: []converter{
-converter{
+{
Measurement: "IIS_log",
},
},

@@ -388,19 +388,19 @@ func TestMultipleConversions(t *testing.T) {
func TestMultipleConversions(t *testing.T) {
plugin := &Strings{
Lowercase: []converter{
-converter{
+{
Tag: "s-computername",
},
-converter{
+{
Field: "request",
},
-converter{
+{
Field: "cs-host",
Dest: "cs-host_lowercase",
},
},
Uppercase: []converter{
-converter{
+{
Tag: "verb",
},
},

@@ -428,18 +428,18 @@ func TestMultipleConversions(t *testing.T) {
func TestReadmeExample(t *testing.T) {
plugin := &Strings{
Lowercase: []converter{
-converter{
+{
Tag: "uri_stem",
},
},
TrimPrefix: []converter{
-converter{
+{
Tag: "uri_stem",
Prefix: "/api/",
},
},
Uppercase: []converter{
-converter{
+{
Field: "cs-host",
Dest: "cs-host_normalised",
},

@@ -492,7 +492,7 @@ func newMetric(name string) telegraf.Metric {
func TestMeasurementReplace(t *testing.T) {
plugin := &Strings{
Replace: []converter{
-converter{
+{
Old: "_",
New: "-",
Measurement: "*",

@@ -513,7 +513,7 @@ func TestMeasurementReplace(t *testing.T) {
func TestMeasurementCharDeletion(t *testing.T) {
plugin := &Strings{
Replace: []converter{
-converter{
+{
Old: "foo",
New: "",
Measurement: "*",
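Note: every strings_test.go edit above is the same mechanical gofmt -s rewrite. As a rough guide (the file path is shown for illustration only, not as a prescribed workflow), the simplification can be reproduced or checked locally with the standard gofmt flags:

    # apply simplification rewrites in place
    gofmt -s -w plugins/processors/strings/strings_test.go

    # list any files that would still be rewritten
    gofmt -l -s .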
@@ -405,7 +405,7 @@ func (t *TopK) getAggregationFunction(aggOperation string) (func([]telegraf.Metr
}
// Divide by the number of recorded measurements collected for every field
noMeasurementsFound := true // Canary to check if no field with values was found, so we can return nil
-for k, _ := range mean {
+for k := range mean {
if meanCounters[k] == 0 {
mean[k] = 0
continue
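Note: the topk.go hunk above is the range-clause simplification: when the value produced by ranging over a map is never used, the blank _ variable is redundant and can be dropped. A minimal sketch, with an illustrative map rather than the plugin's actual data:

    package main

    import "fmt"

    func main() {
        mean := map[string]float64{"a": 1.5, "b": 2.5}

        // Before: the blank value variable carries no information.
        for k, _ := range mean {
            fmt.Println(k)
        }

        // After `gofmt -s`: iterate over the keys only.
        for k := range mean {
            fmt.Println(k)
        }
    }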
@@ -178,11 +178,11 @@ func TestTopkMeanAddAggregateFields(t *testing.T) {
// Generate the answer
chng := fieldList(field{"a_topk_aggregate", float64(28.044)})
changeSet := map[int]metricChange{
-0: metricChange{newFields: chng},
+0: {newFields: chng},
-1: metricChange{newFields: chng},
+1: {newFields: chng},
-2: metricChange{newFields: chng},
+2: {newFields: chng},
-3: metricChange{newFields: chng},
+3: {newFields: chng},
-4: metricChange{newFields: chng},
+4: {newFields: chng},
}
answer := generateAns(input, changeSet)

@@ -208,11 +208,11 @@ func TestTopkSumAddAggregateFields(t *testing.T) {
// Generate the answer
chng := fieldList(field{"a_topk_aggregate", float64(140.22)})
changeSet := map[int]metricChange{
-0: metricChange{newFields: chng},
+0: {newFields: chng},
-1: metricChange{newFields: chng},
+1: {newFields: chng},
-2: metricChange{newFields: chng},
+2: {newFields: chng},
-3: metricChange{newFields: chng},
+3: {newFields: chng},
-4: metricChange{newFields: chng},
+4: {newFields: chng},
}
answer := generateAns(input, changeSet)

@@ -238,11 +238,11 @@ func TestTopkMaxAddAggregateFields(t *testing.T) {
// Generate the answer
chng := fieldList(field{"a_topk_aggregate", float64(50.5)})
changeSet := map[int]metricChange{
-0: metricChange{newFields: chng},
+0: {newFields: chng},
-1: metricChange{newFields: chng},
+1: {newFields: chng},
-2: metricChange{newFields: chng},
+2: {newFields: chng},
-3: metricChange{newFields: chng},
+3: {newFields: chng},
-4: metricChange{newFields: chng},
+4: {newFields: chng},
}
answer := generateAns(input, changeSet)

@@ -268,11 +268,11 @@ func TestTopkMinAddAggregateFields(t *testing.T) {
// Generate the answer
chng := fieldList(field{"a_topk_aggregate", float64(0.3)})
changeSet := map[int]metricChange{
-0: metricChange{newFields: chng},
+0: {newFields: chng},
-1: metricChange{newFields: chng},
+1: {newFields: chng},
-2: metricChange{newFields: chng},
+2: {newFields: chng},
-3: metricChange{newFields: chng},
+3: {newFields: chng},
-4: metricChange{newFields: chng},
+4: {newFields: chng},
}
answer := generateAns(input, changeSet)

@@ -297,10 +297,10 @@ func TestTopkGroupby1(t *testing.T) {

// Generate the answer
changeSet := map[int]metricChange{
-2: metricChange{newFields: fieldList(field{"value_topk_aggregate", float64(74.18)})},
+2: {newFields: fieldList(field{"value_topk_aggregate", float64(74.18)})},
-3: metricChange{newFields: fieldList(field{"value_topk_aggregate", float64(72)})},
+3: {newFields: fieldList(field{"value_topk_aggregate", float64(72)})},
-4: metricChange{newFields: fieldList(field{"value_topk_aggregate", float64(163.22)})},
+4: {newFields: fieldList(field{"value_topk_aggregate", float64(163.22)})},
-5: metricChange{newFields: fieldList(field{"value_topk_aggregate", float64(163.22)})},
+5: {newFields: fieldList(field{"value_topk_aggregate", float64(163.22)})},
}
answer := generateAns(input, changeSet)

@@ -326,11 +326,11 @@ func TestTopkGroupby2(t *testing.T) {
chng2 := fieldList(field{"value_topk_aggregate", float64(72)})
chng3 := fieldList(field{"value_topk_aggregate", float64(81.61)})
changeSet := map[int]metricChange{
-1: metricChange{newFields: chng1},
+1: {newFields: chng1},
-2: metricChange{newFields: chng1},
+2: {newFields: chng1},
-3: metricChange{newFields: chng2},
+3: {newFields: chng2},
-4: metricChange{newFields: chng3},
+4: {newFields: chng3},
-5: metricChange{newFields: chng3},
+5: {newFields: chng3},
}
answer := generateAns(input, changeSet)

@@ -354,8 +354,8 @@ func TestTopkGroupby3(t *testing.T) {
// Generate the answer
chng := fieldList(field{"value_topk_aggregate", float64(75.3)})
changeSet := map[int]metricChange{
-4: metricChange{newFields: chng},
+4: {newFields: chng},
-5: metricChange{newFields: chng},
+5: {newFields: chng},
}
answer := generateAns(input, changeSet)

@@ -381,10 +381,10 @@ func TestTopkGroupbyFields1(t *testing.T) {

// Generate the answer
changeSet := map[int]metricChange{
-0: metricChange{newFields: fieldList(field{"A_topk_aggregate", float64(95.36)})},
+0: {newFields: fieldList(field{"A_topk_aggregate", float64(95.36)})},
-1: metricChange{newFields: fieldList(field{"A_topk_aggregate", float64(39.01)})},
+1: {newFields: fieldList(field{"A_topk_aggregate", float64(39.01)})},
-2: metricChange{newFields: fieldList(field{"A_topk_aggregate", float64(39.01)})},
+2: {newFields: fieldList(field{"A_topk_aggregate", float64(39.01)})},
-5: metricChange{newFields: fieldList(field{"A_topk_aggregate", float64(29.45)})},
+5: {newFields: fieldList(field{"A_topk_aggregate", float64(29.45)})},
}
answer := generateAns(input, changeSet)

@@ -409,10 +409,10 @@ func TestTopkGroupbyFields2(t *testing.T) {

// Generate the answer
changeSet := map[int]metricChange{
-0: metricChange{newFields: fieldList(field{"C_topk_aggregate", float64(72.41)})},
+0: {newFields: fieldList(field{"C_topk_aggregate", float64(72.41)})},
-2: metricChange{newFields: fieldList(field{"B_topk_aggregate", float64(60.96)})},
+2: {newFields: fieldList(field{"B_topk_aggregate", float64(60.96)})},
-4: metricChange{newFields: fieldList(field{"B_topk_aggregate", float64(81.55)}, field{"C_topk_aggregate", float64(49.96)})},
+4: {newFields: fieldList(field{"B_topk_aggregate", float64(81.55)}, field{"C_topk_aggregate", float64(49.96)})},
-5: metricChange{newFields: fieldList(field{"C_topk_aggregate", float64(49.96)})},
+5: {newFields: fieldList(field{"C_topk_aggregate", float64(49.96)})},
}
answer := generateAns(input, changeSet)

@@ -438,9 +438,9 @@ func TestTopkGroupbyMetricName1(t *testing.T) {
// Generate the answer
chng := fieldList(field{"value_topk_aggregate", float64(235.22000000000003)})
changeSet := map[int]metricChange{
-3: metricChange{newFields: chng},
+3: {newFields: chng},
-4: metricChange{newFields: chng},
+4: {newFields: chng},
-5: metricChange{newFields: chng},
+5: {newFields: chng},
}
answer := generateAns(input, changeSet)

@@ -465,10 +465,10 @@ func TestTopkGroupbyMetricName2(t *testing.T) {

// Generate the answer
changeSet := map[int]metricChange{
-0: metricChange{newFields: fieldList(field{"A_topk_aggregate", float64(95.36)})},
+0: {newFields: fieldList(field{"A_topk_aggregate", float64(95.36)})},
-1: metricChange{newFields: fieldList(field{"A_topk_aggregate", float64(78.02)}, field{"value_topk_aggregate", float64(133.61)})},
+1: {newFields: fieldList(field{"A_topk_aggregate", float64(78.02)}, field{"value_topk_aggregate", float64(133.61)})},
-2: metricChange{newFields: fieldList(field{"A_topk_aggregate", float64(78.02)}, field{"value_topk_aggregate", float64(133.61)})},
+2: {newFields: fieldList(field{"A_topk_aggregate", float64(78.02)}, field{"value_topk_aggregate", float64(133.61)})},
-4: metricChange{newFields: fieldList(field{"value_topk_aggregate", float64(87.92)})},
+4: {newFields: fieldList(field{"value_topk_aggregate", float64(87.92)})},
}
answer := generateAns(input, changeSet)

@@ -493,9 +493,9 @@ func TestTopkBottomk(t *testing.T) {

// Generate the answer
changeSet := map[int]metricChange{
-0: metricChange{},
+0: {},
-1: metricChange{},
+1: {},
-3: metricChange{},
+3: {},
}
answer := generateAns(input, changeSet)

@@ -520,10 +520,10 @@ func TestTopkGroupByKeyTag(t *testing.T) {

// Generate the answer
changeSet := map[int]metricChange{
-2: metricChange{newTags: tagList(tag{"gbt", "metric1&tag1=TWO&tag3=SIX&"})},
+2: {newTags: tagList(tag{"gbt", "metric1&tag1=TWO&tag3=SIX&"})},
-3: metricChange{newTags: tagList(tag{"gbt", "metric2&tag1=ONE&tag3=THREE&"})},
+3: {newTags: tagList(tag{"gbt", "metric2&tag1=ONE&tag3=THREE&"})},
-4: metricChange{newTags: tagList(tag{"gbt", "metric2&tag1=TWO&tag3=SEVEN&"})},
+4: {newTags: tagList(tag{"gbt", "metric2&tag1=TWO&tag3=SEVEN&"})},
-5: metricChange{newTags: tagList(tag{"gbt", "metric2&tag1=TWO&tag3=SEVEN&"})},
+5: {newTags: tagList(tag{"gbt", "metric2&tag1=TWO&tag3=SEVEN&"})},
}
answer := generateAns(input, changeSet)
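Note: the changeSet rewrites above are the map form of the composite-literal simplification: the value type of a map literal does not need to be repeated for every entry. A hypothetical sketch, assuming an illustrative change type that is not from this repository:

    package main

    import "fmt"

    // change stands in for the test's metricChange, for illustration only.
    type change struct{ note string }

    func main() {
        // Before: every map value repeats its type.
        verbose := map[int]change{0: change{note: "a"}, 1: change{note: "b"}}
        // After `gofmt -s`: the value type is implied by the map type.
        simplified := map[int]change{0: {note: "a"}, 1: {note: "b"}}
        fmt.Println(verbose, simplified)
    }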
@@ -237,7 +237,7 @@ func (a *Accumulator) NFields() int {
defer a.Unlock()
counter := 0
for _, pt := range a.Metrics {
-for _, _ = range pt.Fields {
+for range pt.Fields {
counter++
}
}
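Note: the accumulator.go hunk above shows the last range simplification in this commit: when neither the key nor the value is needed, the whole assignment list is dropped and only the iteration count remains. A small illustrative sketch with placeholder data:

    package main

    import "fmt"

    func main() {
        fields := map[string]int{"usage": 1, "load": 2}

        counter := 0
        // `gofmt -s` reduces `for _, _ = range fields` to a bare range clause
        // when neither the key nor the value is used in the loop body.
        for range fields {
            counter++
        }
        fmt.Println(counter)
    }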