exec plugin: allow using glob pattern in command list

Allow using glob patterns in the command list in the configuration. This makes it
possible, for example, to place all commands in a single directory and use
/path/to/dir/*.sh as one of the commands to run every shell script in that directory.

Glob patterns are applied on every run of the commands, so matching commands can
be added without restarting Telegraf.
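
At its core the change expands each configured entry with filepath.Glob before
dispatching it. Below is a minimal, standalone sketch of that approach; the helper
name expandCommands and the example paths are illustrative only, and the actual
change lives in the exec plugin's Gather method shown in the diff further down.

```
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// expandCommands expands glob patterns in a configured command list.
// A pattern that matches no files is passed through unchanged, on the
// assumption that it names an executable found on PATH.
func expandCommands(configured []string) ([]string, error) {
	expanded := make([]string, 0, len(configured))
	for _, pattern := range configured {
		// Split the command from its arguments; only the command part is globbed.
		cmdAndArgs := strings.SplitN(pattern, " ", 2)
		matches, err := filepath.Glob(cmdAndArgs[0])
		if err != nil {
			return nil, err
		}
		if len(matches) == 0 {
			expanded = append(expanded, pattern)
			continue
		}
		for _, match := range matches {
			if len(cmdAndArgs) > 1 {
				match = match + " " + cmdAndArgs[1]
			}
			expanded = append(expanded, match)
		}
	}
	return expanded, nil
}

func main() {
	cmds, err := expandCommands([]string{"/usr/bin/mycollector --foo=bar", "/path/to/dir/*.sh"})
	if err != nil {
		panic(err)
	}
	// Each file matching /path/to/dir/*.sh becomes its own command;
	// non-matching entries are kept verbatim.
	fmt.Println(cmds)
}
```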

closes #1142
Author: Jari Sukanen, 2016-04-29 16:07:01 +03:00 (committed by Cameron Sparr)
Parent: 98d91b1c89
Commit: 6828fc48e1
4 changed files with 91 additions and 8 deletions

CHANGELOG.md

@@ -25,6 +25,7 @@ time before a new metric is included by the plugin.
 - [#1272](https://github.com/influxdata/telegraf/pull/1272): graphite parser: add ability to specify multiple tag keys, for consistency with influxdb parser.
 - [#1265](https://github.com/influxdata/telegraf/pull/1265): Make dns lookups for chrony configurable. Thanks @zbindenren!
 - [#1275](https://github.com/influxdata/telegraf/pull/1275): Allow wildcard filtering of varnish stats.
+- [#1142](https://github.com/influxdata/telegraf/pull/1142): Support for glob patterns in exec plugin commands configuration.
 
 ### Bugfixes

plugins/inputs/exec/README.md

@@ -6,14 +6,17 @@ Please also see: [Telegraf Input Data Formats](https://github.com/influxdata/tel
 #### Configuration
 
-In this example a script called ```/tmp/test.sh``` and a script called ```/tmp/test2.sh```
-are configured for ```[[inputs.exec]]``` in JSON format.
+In this example a script called ```/tmp/test.sh```, a script called ```/tmp/test2.sh```, and
+all scripts matching glob pattern ```/tmp/collect_*.sh``` are configured for ```[[inputs.exec]]```
+in JSON format. Glob patterns are matched on every run, so adding new scripts that match the pattern
+will cause them to be picked up immediately.
 
 ```
 # Read flattened metrics from one or more commands that output JSON to stdout
 [[inputs.exec]]
-# Shell/commands array
-commands = ["/tmp/test.sh", "/tmp/test2.sh"]
+# Full command line to executable with parameters, or a glob pattern to run all matching files.
+commands = ["/tmp/test.sh", "/tmp/test2.sh", "/tmp/collect_*.sh"]
 
 # Data format to consume.
 # NOTE json only reads numerical measurements, strings and booleans are ignored.
@@ -180,4 +183,3 @@ sensu.metric.net.server0.eth0.rx_dropped 0 1444234982
 The templates configuration will be used to parse the graphite metrics to support influxdb/opentsdb tagging store engines.
 More detail information about templates, please refer to [The graphite Input](https://github.com/influxdata/influxdb/blob/master/services/graphite/README.md)

plugins/inputs/exec/exec.go

@@ -4,6 +4,8 @@ import (
 	"bytes"
 	"fmt"
 	"os/exec"
+	"path/filepath"
+	"strings"
 	"sync"
 	"syscall"
 	"time"
@@ -19,7 +21,11 @@ import (
 
 const sampleConfig = `
   ## Commands array
-  commands = ["/tmp/test.sh", "/usr/bin/mycollector --foo=bar"]
+  commands = [
+    "/tmp/test.sh",
+    "/usr/bin/mycollector --foo=bar",
+    "/tmp/collect_*.sh"
+  ]
 
   ## Timeout for each command to complete.
   timeout = "5s"
@@ -150,10 +156,36 @@ func (e *Exec) Gather(acc telegraf.Accumulator) error {
 		e.Command = ""
 	}
 
-	e.errChan = make(chan error, len(e.Commands))
+	commands := make([]string, 0, len(e.Commands))
+	for _, pattern := range e.Commands {
+		cmdAndArgs := strings.SplitN(pattern, " ", 2)
+		if len(cmdAndArgs) == 0 {
+			continue
+		}
 
-	e.wg.Add(len(e.Commands))
-	for _, command := range e.Commands {
+		matches, err := filepath.Glob(cmdAndArgs[0])
+		if err != nil {
+			return err
+		}
+
+		if len(matches) == 0 {
+			// There were no matches with the glob pattern, so let's assume
+			// that the command is in PATH and just run it as it is
+			commands = append(commands, pattern)
+		} else {
+			// There were matches, so we'll append each match together with
+			// the arguments to the commands slice
+			for _, match := range matches {
+				if len(cmdAndArgs) == 1 {
+					// The pattern carried no arguments, so run the match as-is
+					commands = append(commands, match)
+				} else {
+					commands = append(commands,
+						strings.Join([]string{match, cmdAndArgs[1]}, " "))
+				}
+			}
+		}
+	}
+
+	e.errChan = make(chan error, len(commands))
+
+	e.wg.Add(len(commands))
+	for _, command := range commands {
 		go e.ProcessCommand(command, acc)
 	}
 	e.wg.Wait()

plugins/inputs/exec/exec_test.go

@@ -169,3 +169,51 @@ func TestLineProtocolParseMultiple(t *testing.T) {
 		acc.AssertContainsTaggedFields(t, "cpu", fields, tags)
 	}
 }
+
+func TestExecCommandWithGlob(t *testing.T) {
+	parser, _ := parsers.NewValueParser("metric", "string", nil)
+	e := NewExec()
+	e.Commands = []string{"/bin/ech* metric_value"}
+	e.SetParser(parser)
+
+	var acc testutil.Accumulator
+	err := e.Gather(&acc)
+	require.NoError(t, err)
+
+	fields := map[string]interface{}{
+		"value": "metric_value",
+	}
+	acc.AssertContainsFields(t, "metric", fields)
+}
+
+func TestExecCommandWithoutGlob(t *testing.T) {
+	parser, _ := parsers.NewValueParser("metric", "string", nil)
+	e := NewExec()
+	e.Commands = []string{"/bin/echo metric_value"}
+	e.SetParser(parser)
+
+	var acc testutil.Accumulator
+	err := e.Gather(&acc)
+	require.NoError(t, err)
+
+	fields := map[string]interface{}{
+		"value": "metric_value",
+	}
+	acc.AssertContainsFields(t, "metric", fields)
+}
+
+func TestExecCommandWithoutGlobAndPath(t *testing.T) {
+	parser, _ := parsers.NewValueParser("metric", "string", nil)
+	e := NewExec()
+	e.Commands = []string{"echo metric_value"}
+	e.SetParser(parser)
+
+	var acc testutil.Accumulator
+	err := e.Gather(&acc)
+	require.NoError(t, err)
+
+	fields := map[string]interface{}{
+		"value": "metric_value",
+	}
+	acc.AssertContainsFields(t, "metric", fields)
+}