committed by Cameron Sparr
parent 20b4e8c779
commit 8f09aadfdf

plugins/parsers/nagios/parser.go (new file, 102 additions)
@@ -0,0 +1,102 @@
package nagios

import (
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/influxdata/telegraf"
)

type NagiosParser struct {
	MetricName  string
	DefaultTags map[string]string
}

// Perfdata regexps adapted from Alignak:
// https://github.com/Alignak-monitoring/alignak/blob/develop/alignak/misc/perfdata.py
// nagiosRegExp captures, in order: label, value, unit, warning, critical, min, max.
var perfSplitRegExp = regexp.MustCompile(`([^=]+=\S+)`)
var nagiosRegExp = regexp.MustCompile(`^([^=]+)=([\d\.\-\+eE]+)([\w\/%]*);?([\d\.\-\+eE:~@]+)?;?([\d\.\-\+eE:~@]+)?;?([\d\.\-\+eE]+)?;?([\d\.\-\+eE]+)?;?\s*`)

func (p *NagiosParser) ParseLine(line string) (telegraf.Metric, error) {
	metrics, err := p.Parse([]byte(line))
	if err != nil || len(metrics) == 0 {
		return nil, err
	}
	return metrics[0], nil
}

func (p *NagiosParser) SetDefaultTags(tags map[string]string) {
	p.DefaultTags = tags
}

// Example of resulting metrics in line protocol:
//> rta,host=absol,unit=ms critical=6000,min=0,value=0.332,warning=4000 1456374625003628099
//> pl,host=absol,unit=% critical=90,min=0,value=0,warning=80 1456374625003693967

func (p *NagiosParser) Parse(buf []byte) ([]telegraf.Metric, error) {
	metrics := make([]telegraf.Metric, 0)
	// Convert to string
	out := string(buf)
	// Prepare output for splitting:
	// protect escaped pipes so they do not act as perfdata separators
	out = strings.Replace(out, `\|`, "___PROTECT_PIPE___", -1)
	// Split lines and keep only the first one
	lines := strings.Split(out, "\n")
	// Split the human-readable output from the perfdata
	dataSplit := strings.Split(lines[0], "|")
	if len(dataSplit) <= 1 {
		// No pipe == no perfdata
		return nil, nil
	}
	// Get the perfdata
	perfdata := dataSplit[1]
	// Restore escaped pipes
	perfdata = strings.Replace(perfdata, "___PROTECT_PIPE___", `\|`, -1)
	// Split the perfdata into individual entries
	unParsedPerfs := perfSplitRegExp.FindAllSubmatch([]byte(perfdata), -1)
	// Iterate over all entries
	for _, unParsedPerf := range unParsedPerfs {
		// Trim the entry
		trimmedPerf := strings.Trim(string(unParsedPerf[0]), " ")
		// Parse the entry
		perf := nagiosRegExp.FindAllSubmatch([]byte(trimmedPerf), -1)
		// Skip malformed entries
		if len(perf) == 0 {
			continue
		}
		if len(perf[0]) <= 2 {
			continue
		}
		if perf[0][1] == nil || perf[0][2] == nil {
			continue
		}
		fieldName := string(perf[0][1])
		tags := make(map[string]string)
		if unit := string(perf[0][3]); unit != "" {
			tags["unit"] = unit
		}
		fields := make(map[string]interface{})
		value, err := strconv.ParseFloat(string(perf[0][2]), 64)
		if err != nil {
			continue
		}
		fields["value"] = value
		// TODO should we set an empty field
		// if there is no data?
		// warning/critical/min/max are optional; keep them only when they
		// parse as plain floats (range thresholds such as 10:20 are dropped)
		if perf[0][4] != nil {
			if warning, err := strconv.ParseFloat(string(perf[0][4]), 64); err == nil {
				fields["warning"] = warning
			}
		}
		if perf[0][5] != nil {
			if critical, err := strconv.ParseFloat(string(perf[0][5]), 64); err == nil {
				fields["critical"] = critical
			}
		}
		if perf[0][6] != nil {
			if minVal, err := strconv.ParseFloat(string(perf[0][6]), 64); err == nil {
				fields["min"] = minVal
			}
		}
		if perf[0][7] != nil {
			if maxVal, err := strconv.ParseFloat(string(perf[0][7]), 64); err == nil {
				fields["max"] = maxVal
			}
		}
		// Create the metric
		metric, err := telegraf.NewMetric(fieldName, tags, fields, time.Now().UTC())
		if err != nil {
			return nil, err
		}
		// Add the metric
		metrics = append(metrics, metric)
	}

	return metrics, nil
}
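Not part of the commit: a minimal sketch of calling the parser directly on a raw check output. The sample string is the one used in the tests below; the standalone main package is illustrative only.

package main

import (
	"fmt"
	"log"

	"github.com/influxdata/telegraf/plugins/parsers/nagios"
)

func main() {
	// Raw Nagios-style plugin output: human-readable text, a pipe, then perfdata.
	out := "PING OK - Packet loss = 0%, RTA = 0.30 ms|rta=0.298000ms;4000.000000;6000.000000;0.000000 pl=0%;80;90;0;100"

	parser := &nagios.NagiosParser{MetricName: "nagios"}
	metrics, err := parser.Parse([]byte(out))
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range metrics {
		// One metric per perfdata entry ("rta" and "pl"), with a "unit" tag
		// and value/warning/critical/min/max fields where present.
		fmt.Println(m.Name(), m.Tags(), m.Fields())
	}
}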
plugins/parsers/nagios/parser_test.go (new file, 89 additions)
@@ -0,0 +1,89 @@
package nagios

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

const validOutput1 = `PING OK - Packet loss = 0%, RTA = 0.30 ms|rta=0.298000ms;4000.000000;6000.000000;0.000000 pl=0%;80;90;0;100
This is a long output
with three lines
`
const validOutput2 = "TCP OK - 0.008 second response time on port 80|time=0.008457s;;;0.000000;10.000000"
const validOutput3 = "TCP OK - 0.008 second response time on port 80|time=0.008457"
const invalidOutput3 = "PING OK - Packet loss = 0%, RTA = 0.30 ms"
const invalidOutput4 = "PING OK - Packet loss = 0%, RTA = 0.30 ms| =3;;;; dgasdg =;;;; sff=;;;;"

func TestParseValidOutput(t *testing.T) {
	parser := NagiosParser{
		MetricName: "nagios_test",
	}

	// Output1
	metrics, err := parser.Parse([]byte(validOutput1))
	require.NoError(t, err)
	assert.Len(t, metrics, 2)
	// rta
	assert.Equal(t, "rta", metrics[0].Name())
	assert.Equal(t, map[string]interface{}{
		"value":    float64(0.298),
		"warning":  float64(4000),
		"critical": float64(6000),
		"min":      float64(0),
	}, metrics[0].Fields())
	assert.Equal(t, map[string]string{"unit": "ms"}, metrics[0].Tags())
	// pl
	assert.Equal(t, "pl", metrics[1].Name())
	assert.Equal(t, map[string]interface{}{
		"value":    float64(0),
		"warning":  float64(80),
		"critical": float64(90),
		"min":      float64(0),
		"max":      float64(100),
	}, metrics[1].Fields())
	assert.Equal(t, map[string]string{"unit": "%"}, metrics[1].Tags())

	// Output2
	metrics, err = parser.Parse([]byte(validOutput2))
	require.NoError(t, err)
	assert.Len(t, metrics, 1)
	// time
	assert.Equal(t, "time", metrics[0].Name())
	assert.Equal(t, map[string]interface{}{
		"value": float64(0.008457),
		"min":   float64(0),
		"max":   float64(10),
	}, metrics[0].Fields())
	assert.Equal(t, map[string]string{"unit": "s"}, metrics[0].Tags())

	// Output3
	metrics, err = parser.Parse([]byte(validOutput3))
	require.NoError(t, err)
	assert.Len(t, metrics, 1)
	// time
	assert.Equal(t, "time", metrics[0].Name())
	assert.Equal(t, map[string]interface{}{
		"value": float64(0.008457),
	}, metrics[0].Fields())
	assert.Equal(t, map[string]string{}, metrics[0].Tags())
}

func TestParseInvalidOutput(t *testing.T) {
	parser := NagiosParser{
		MetricName: "nagios_test",
	}

	// invalidOutput3
	metrics, err := parser.Parse([]byte(invalidOutput3))
	require.NoError(t, err)
	assert.Len(t, metrics, 0)

	// invalidOutput4
	metrics, err = parser.Parse([]byte(invalidOutput4))
	require.NoError(t, err)
	assert.Len(t, metrics, 0)
}
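ParseLine is not exercised by the tests above; a small additional case along these lines (hypothetical, not part of this commit) would cover it, reusing the validOutput2 constant:

func TestParseLine(t *testing.T) {
	parser := NagiosParser{
		MetricName: "nagios_test",
	}

	// A single perfdata entry should come back as a single metric.
	metric, err := parser.ParseLine(validOutput2)
	require.NoError(t, err)
	require.NotNil(t, metric)
	assert.Equal(t, "time", metric.Name())
	assert.Equal(t, map[string]string{"unit": "s"}, metric.Tags())
}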
plugins/parsers/registry.go
@@ -8,6 +8,7 @@ import (
 	"github.com/influxdata/telegraf/plugins/parsers/graphite"
 	"github.com/influxdata/telegraf/plugins/parsers/influx"
 	"github.com/influxdata/telegraf/plugins/parsers/json"
+	"github.com/influxdata/telegraf/plugins/parsers/nagios"
 	"github.com/influxdata/telegraf/plugins/parsers/value"
 )
 
@@ -39,7 +40,7 @@ type Parser interface {
 // Config is a struct that covers the data types needed for all parser types,
 // and can be used to instantiate _any_ of the parsers.
 type Config struct {
-	// Dataformat can be one of: json, influx, graphite, value
+	// Dataformat can be one of: json, influx, graphite, value, nagios
 	DataFormat string
 
 	// Separator only applied to Graphite data.
@@ -72,6 +73,8 @@ func NewParser(config *Config) (Parser, error) {
 			config.DataType, config.DefaultTags)
 	case "influx":
 		parser, err = NewInfluxParser()
+	case "nagios":
+		parser, err = NewNagiosParser()
 	case "graphite":
 		parser, err = NewGraphiteParser(config.Separator,
 			config.Templates, config.DefaultTags)
@@ -94,6 +97,10 @@ func NewJSONParser(
 	return parser, nil
 }
 
+func NewNagiosParser() (Parser, error) {
+	return &nagios.NagiosParser{}, nil
+}
+
 func NewInfluxParser() (Parser, error) {
 	return &influx.InfluxParser{}, nil
 }
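Not part of the diff: a brief sketch of how an input plugin could obtain the new parser through this registry. The standalone main package and the sample check output are illustrative, and the import path assumes the registry shown above lives in plugins/parsers.

package main

import (
	"fmt"
	"log"

	"github.com/influxdata/telegraf/plugins/parsers"
)

func main() {
	// Select the nagios parser through the shared registry, the same way an
	// input plugin configured with data_format = "nagios" would.
	parser, err := parsers.NewParser(&parsers.Config{DataFormat: "nagios"})
	if err != nil {
		log.Fatal(err)
	}

	out := "TCP OK - 0.008 second response time on port 80|time=0.008457s;;;0.000000;10.000000"
	metrics, err := parser.Parse([]byte(out))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(metrics)) // expected: 1 metric named "time"
}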