Implementing generic parser plugins and documentation
This constitutes a large change in how we will parse different data formats going forward (for the plugins that support it). This is working off @henrypfhu's changes.
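At a high level, input plugins that support this no longer decode their raw output themselves; the config layer hands them a `parsers.Parser` and they simply feed it bytes. Below is a minimal sketch of that pattern, assuming only what the diff itself shows (`SetParser`, `parsers.Parser.Parse`, and the accumulator calls); the `MyInput` type and the `gatherBytes` helper are illustrative names, not part of this change:

```go
package example

import (
	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/plugins/parsers"
)

// MyInput stands in for any input plugin that supports configurable data formats.
type MyInput struct {
	parser parsers.Parser
}

// SetParser is called by the config layer with a parser built from the
// plugin's data_format setting and its format-specific options.
func (m *MyInput) SetParser(p parsers.Parser) {
	m.parser = p
}

// gatherBytes converts raw plugin output into metrics and hands them to the accumulator.
func (m *MyInput) gatherBytes(raw []byte, acc telegraf.Accumulator) error {
	metrics, err := m.parser.Parse(raw)
	if err != nil {
		return err
	}
	for _, metric := range metrics {
		acc.AddFields(metric.Name(), metric.Fields(), metric.Tags(), metric.Time())
	}
	return nil
}
```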
@@ -8,8 +8,8 @@ The exec plugin can execute arbitrary commands which output:
> Graphite understands messages with this format:

> ```
metric_path value timestamp\n
> ```
metric_path value timestamp\n
```

> __metric_path__ is the metric namespace that you want to populate.
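For example, a conforming line (borrowing the sensu sample that appears later in this README) looks like:

```
sensu.metric.net.server0.eth0.rx_dropped 0 1444234982
```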
@@ -28,10 +28,7 @@ and strings will be ignored.
# Read flattened metrics from one or more commands that output JSON to stdout
[[inputs.exec]]
# Shell/commands array
# compatible with old version
# we can still use the old command configuration
# command = "/usr/bin/mycollector --foo=bar"
commands = ["/tmp/test.sh","/tmp/test2.sh"]
commands = ["/tmp/test.sh", "/tmp/test2.sh"]

# Data format to consume. This can be "json", "influx" or "graphite" (line-protocol)
# NOTE json only reads numerical measurements, strings and booleans are ignored.
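As a hedged illustration of the "json" data format, here is a small sketch using the `parsers.NewJSONParser` constructor that the tests later in this diff also use; the input document and the flattened field names are illustrative assumptions, not the output of a real collector:

```go
package main

import (
	"fmt"

	"github.com/influxdata/telegraf/plugins/parsers"
)

func main() {
	// "exec" is the measurement name; no tag keys and no default tags.
	parser, err := parsers.NewJSONParser("exec", []string{}, nil)
	if err != nil {
		panic(err)
	}

	// Numeric values are kept, nested objects are flattened, and strings are ignored.
	metrics, err := parser.Parse([]byte(`{"status": "ok", "users": 42, "mem": {"free": 1024}}`))
	if err != nil {
		panic(err)
	}
	for _, m := range metrics {
		// Expected, roughly: exec map[users:42 mem_free:1024]
		fmt.Println(m.Name(), m.Fields())
	}
}
```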
@@ -128,7 +125,7 @@ and usage_busy. They will receive a timestamp at collection time.
We can also change the data_format to "graphite" to consume graphite-compatible metrics from collection scripts such as:

* Nagios [Metrics Plugins](https://exchange.nagios.org/directory/Plugins)
* Sensu [Metrics Plugins](https://github.com/sensu-plugins)

#### Configuration
```
@@ -180,4 +177,4 @@ sensu.metric.net.server0.eth0.rx_dropped 0 1444234982
The templates configuration is used to parse the graphite metrics so that they can be stored with InfluxDB/OpenTSDB-style tags.

For more detailed information about templates, please refer to [the Graphite Input](https://github.com/influxdata/influxdb/blob/master/services/graphite/README.md).
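As a rough illustration (the exact behaviour is defined by the Graphite input documentation linked above), a template line such as `"stats.* .host.measurement* region=us-west,agent=sensu"`, one of the sample template lines shown in this diff, would map a received line like `stats.server0.cpu.load 5 1444234982` to a measurement named `cpu.load` with tags `host=server0`, `region=us-west`, and `agent=sensu`, and a value of `5`.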
@@ -9,66 +9,40 @@ import (
"github.com/gonuts/go-shellquote"

"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/internal/encoding"
"github.com/influxdata/telegraf/plugins/inputs"

_ "github.com/influxdata/telegraf/internal/encoding/graphite"
_ "github.com/influxdata/telegraf/internal/encoding/influx"
_ "github.com/influxdata/telegraf/internal/encoding/json"
"github.com/influxdata/telegraf/plugins/parsers"
)
const sampleConfig = `
# Shell/commands array
# compatible with old version
# we can still use the old command configuration
# command = "/usr/bin/mycollector --foo=bar"
commands = ["/tmp/test.sh","/tmp/test2.sh"]
### Commands array
commands = ["/tmp/test.sh", "/usr/bin/mycollector --foo=bar"]

# Data format to consume. This can be "json", "influx" or "graphite" (line-protocol)
# NOTE json only reads numerical measurements, strings and booleans are ignored.
data_format = "json"

# measurement name suffix (for separating different commands)
### measurement name suffix (for separating different commands)
name_suffix = "_mycollector"

### The configuration below is used for data_format = "graphite" and can be ignored for other data formats
### If matching multiple measurement files, this string will be used to join the matched values.
separator = "."

### Each template line requires a template pattern. It can have an optional
### filter before the template, separated by spaces. It can also have optional extra
### tags following the template. Multiple tags should be separated by commas with no spaces,
### similar to the line protocol format. There can be only one default template.
### Templates support the following formats:
### 1. filter + template
### 2. filter + template + extra tag
### 3. filter + template with field key
### 4. default template
templates = [
"*.app env.service.resource.measurement",
"stats.* .host.measurement* region=us-west,agent=sensu",
"stats2.* .host.measurement.field",
"measurement*"
]
### Data format to consume. This can be "json", "influx" or "graphite"
### Each data format has its own unique set of configuration options, read
### more about them here:
### https://github.com/influxdata/telegraf/blob/master/DATA_FORMATS.md
data_format = "influx"
`
type Exec struct {
Commands []string
Command string
DataFormat string
Commands []string
Command string

Separator string
Templates []string

encodingParser encoding.Parser

initedConfig bool
parser parsers.Parser

wg sync.WaitGroup
sync.Mutex

runner Runner
errc chan error
runner Runner
errChan chan error
}

func NewExec() *Exec {
return &Exec{
runner: CommandRunner{},
}
}

type Runner interface {
@@ -95,22 +69,18 @@ func (c CommandRunner) Run(e *Exec, command string) ([]byte, error) {
return out.Bytes(), nil
}

func NewExec() *Exec {
return &Exec{runner: CommandRunner{}}
}

func (e *Exec) ProcessCommand(command string, acc telegraf.Accumulator) {
defer e.wg.Done()

out, err := e.runner.Run(e, command)
if err != nil {
e.errc <- err
e.errChan <- err
return
}

metrics, err := e.encodingParser.Parse(out)
metrics, err := e.parser.Parse(out)
if err != nil {
e.errc <- err
e.errChan <- err
} else {
for _, metric := range metrics {
acc.AddFields(metric.Name(), metric.Fields(), metric.Tags(), metric.Time())
@@ -118,66 +88,33 @@ func (e *Exec) ProcessCommand(command string, acc telegraf.Accumulator) {
}
}

func (e *Exec) initConfig() error {
e.Lock()
defer e.Unlock()

if e.Command != "" && len(e.Commands) < 1 {
e.Commands = []string{e.Command}
}

if e.DataFormat == "" {
e.DataFormat = "json"
}

var err error

configs := make(map[string]interface{})
configs["Separator"] = e.Separator
configs["Templates"] = e.Templates

e.encodingParser, err = encoding.NewParser(e.DataFormat, configs)

if err != nil {
return fmt.Errorf("exec configuration error: %s", err.Error())
}

return nil
}

func (e *Exec) SampleConfig() string {
return sampleConfig
}

func (e *Exec) Description() string {
return "Read metrics from one or more commands that can output JSON, influx or graphite line protocol to stdout"
return "Read metrics from one or more commands that can output to stdout"
}

func (e *Exec) SetParser(parser parsers.Parser) {
e.parser = parser
}

func (e *Exec) Gather(acc telegraf.Accumulator) error {
e.errChan = make(chan error, len(e.Commands))

if !e.initedConfig {
if err := e.initConfig(); err != nil {
return err
}
e.initedConfig = true
}

e.Lock()
e.errc = make(chan error, 10)
e.Unlock()

e.wg.Add(len(e.Commands))
for _, command := range e.Commands {
e.wg.Add(1)
go e.ProcessCommand(command, acc)
}
e.wg.Wait()

select {
default:
close(e.errc)
close(e.errChan)
return nil
case err := <-e.errc:
close(e.errc)
case err := <-e.errChan:
close(e.errChan)
return err
}
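The reworked `Gather` above fans commands out to goroutines, collects failures on an error channel buffered to the number of commands, and returns the first error (if any) once the WaitGroup completes. Here is a standalone sketch of that pattern; the `run` and `work` names are illustrative and not part of the Telegraf code:

```go
package main

import (
	"fmt"
	"sync"
)

// run starts one goroutine per command, collects errors on a buffered channel
// so no goroutine ever blocks, and reports the first error after all finish.
func run(commands []string, work func(string) error) error {
	errChan := make(chan error, len(commands))
	var wg sync.WaitGroup
	wg.Add(len(commands))
	for _, c := range commands {
		go func(cmd string) {
			defer wg.Done()
			if err := work(cmd); err != nil {
				errChan <- err
			}
		}(c)
	}
	wg.Wait()

	select {
	case err := <-errChan:
		return err
	default:
		return nil
	}
}

func main() {
	err := run([]string{"a", "b"}, func(cmd string) error {
		if cmd == "b" {
			return fmt.Errorf("command %q failed", cmd)
		}
		return nil
	})
	fmt.Println(err)
}
```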
@@ -4,6 +4,8 @@ import (
"fmt"
"testing"

"github.com/influxdata/telegraf/plugins/parsers"

"github.com/influxdata/telegraf/testutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -63,9 +65,11 @@ func (r runnerMock) Run(e *Exec, command string) ([]byte, error) {
}

func TestExec(t *testing.T) {
parser, _ := parsers.NewJSONParser("exec", []string{}, nil)
e := &Exec{
runner: newRunnerMock([]byte(validJson), nil),
Commands: []string{"testcommand arg1"},
parser: parser,
}

var acc testutil.Accumulator
@@ -87,9 +91,11 @@ func TestExec(t *testing.T) {
}

func TestExecMalformed(t *testing.T) {
parser, _ := parsers.NewJSONParser("exec", []string{}, nil)
e := &Exec{
runner: newRunnerMock([]byte(malformedJson), nil),
Commands: []string{"badcommand arg1"},
parser: parser,
}

var acc testutil.Accumulator
@@ -99,9 +105,11 @@ func TestExecMalformed(t *testing.T) {
}

func TestCommandError(t *testing.T) {
parser, _ := parsers.NewJSONParser("exec", []string{}, nil)
e := &Exec{
runner: newRunnerMock(nil, fmt.Errorf("exit status code 1")),
Commands: []string{"badcommand"},
parser: parser,
}

var acc testutil.Accumulator
@@ -111,10 +119,11 @@ func TestCommandError(t *testing.T) {
}

func TestLineProtocolParse(t *testing.T) {
parser, _ := parsers.NewInfluxParser()
e := &Exec{
runner: newRunnerMock([]byte(lineProtocol), nil),
Commands: []string{"line-protocol"},
DataFormat: "influx",
runner: newRunnerMock([]byte(lineProtocol), nil),
Commands: []string{"line-protocol"},
parser: parser,
}

var acc testutil.Accumulator
@@ -133,10 +142,11 @@ func TestLineProtocolParse(t *testing.T) {
}

func TestLineProtocolParseMultiple(t *testing.T) {
parser, _ := parsers.NewInfluxParser()
e := &Exec{
runner: newRunnerMock([]byte(lineProtocolMulti), nil),
Commands: []string{"line-protocol"},
DataFormat: "influx",
runner: newRunnerMock([]byte(lineProtocolMulti), nil),
Commands: []string{"line-protocol"},
parser: parser,
}

var acc testutil.Accumulator
@@ -158,15 +168,3 @@ func TestLineProtocolParseMultiple(t *testing.T) {
acc.AssertContainsTaggedFields(t, "cpu", fields, tags)
}
}

func TestInvalidDataFormat(t *testing.T) {
e := &Exec{
runner: newRunnerMock([]byte(lineProtocol), nil),
Commands: []string{"bad data format"},
DataFormat: "FooBar",
}

var acc testutil.Accumulator
err := e.Gather(&acc)
require.Error(t, err)
}