exec plugin: allow using glob pattern in command list
Allow using glob patterns in the command list of the configuration. This makes it possible, for example, to place all commands in a single directory and use /path/to/dir/*.sh as one of the commands to run every shell script in that directory. Glob patterns are applied on every run of the commands, so matching commands can be added without restarting Telegraf.
parent c6699c36d3
commit 9234c0b143
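For illustration only (not part of this commit's diff): a minimal Go sketch of the expansion the message describes, using the standard library's filepath.Glob on the commit message's hypothetical /path/to/dir/*.sh pattern. Because the pattern is evaluated on every call, scripts dropped into the directory later are picked up on the next run.

```
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Hypothetical pattern from the commit message; any directory of
	// shell scripts works. Glob re-reads the directory on every call,
	// so newly added scripts appear in the next expansion.
	matches, err := filepath.Glob("/path/to/dir/*.sh")
	if err != nil {
		// Glob only fails on a malformed pattern, never on zero matches.
		fmt.Println("bad pattern:", err)
		return
	}
	for _, script := range matches {
		fmt.Println(script) // each match would become one exec command
	}
}
```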
@@ -15,6 +15,7 @@ to "stdout".
 - [#1172](https://github.com/influxdata/telegraf/pull/1172): Ceph storage stats. Thanks @robinpercy!
 - [#1233](https://github.com/influxdata/telegraf/pull/1233): Updated golint gopsutil dependency.
 - [#479](https://github.com/influxdata/telegraf/issues/479): per-plugin execution time added to debug output.
+- [#1127](https://github.com/influxdata/telegraf/pull/1142): Support for glob patterns in exec plugin commands configuration.
 
 ### Bugfixes
 
@@ -6,14 +6,17 @@ Please also see: [Telegraf Input Data Formats](https://github.com/influxdata/tel
 
 #### Configuration
 
-In this example a script called ```/tmp/test.sh``` and a script called ```/tmp/test2.sh```
-are configured for ```[[inputs.exec]]``` in JSON format.
+In this example a script called ```/tmp/test.sh```, a script called ```/tmp/test2.sh```, and
+all scripts matching glob pattern ```/tmp/collect_*.sh``` are configured for ```[[inputs.exec]]```
+in JSON format. Glob patterns are matched on every run, so adding new scripts that match the pattern
+will cause them to be picked up immediately.
 
 ```
 # Read flattened metrics from one or more commands that output JSON to stdout
 [[inputs.exec]]
   # Shell/commands array
-  commands = ["/tmp/test.sh", "/tmp/test2.sh"]
+  # Full command line to executable with parameters, or a glob pattern to run all matching files.
+  commands = ["/tmp/test.sh", "/tmp/test2.sh", "/tmp/collect_*.sh"]
 
   # Data format to consume.
   # NOTE json only reads numerical measurements, strings and booleans are ignored.
@@ -180,4 +183,3 @@ sensu.metric.net.server0.eth0.rx_dropped 0 1444234982
 The templates configuration will be used to parse the graphite metrics to support influxdb/opentsdb tagging store engines.
 
 More detail information about templates, please refer to [The graphite Input](https://github.com/influxdata/influxdb/blob/master/services/graphite/README.md)
-
@@ -4,6 +4,8 @@ import (
 	"bytes"
 	"fmt"
 	"os/exec"
+	"path/filepath"
+	"strings"
 	"sync"
 	"syscall"
 	"time"
@@ -19,7 +21,11 @@ import (
 
 const sampleConfig = `
   ## Commands array
-  commands = ["/tmp/test.sh", "/usr/bin/mycollector --foo=bar"]
+  commands = [
+    "/tmp/test.sh",
+    "/usr/bin/mycollector --foo=bar",
+    "/tmp/collect_*.sh"
+  ]
 
   ## Timeout for each command to complete.
   timeout = "5s"
@@ -150,10 +156,36 @@ func (e *Exec) Gather(acc telegraf.Accumulator) error {
 		e.Command = ""
 	}
 
-	e.errChan = make(chan error, len(e.Commands))
+	commands := make([]string, 0, len(e.Commands))
+	for _, pattern := range e.Commands {
+		cmdAndArgs := strings.SplitN(pattern, " ", 2)
+		if len(cmdAndArgs) == 0 {
+			continue
+		}
 
-	e.wg.Add(len(e.Commands))
-	for _, command := range e.Commands {
+		matches, err := filepath.Glob(cmdAndArgs[0])
+		if err != nil {
+			return err
+		}
+
+		if len(matches) == 0 {
+			// There were no matches with the glob pattern, so let's assume
+			// that the command is in PATH and just run it as it is
+			commands = append(commands, pattern)
+		} else {
+			// There were matches, so we'll append each match together with
+			// the arguments to the commands slice
+			for _, match := range matches {
+				commands = append(
+					commands, strings.Join([]string{match, cmdAndArgs[1]}, " "))
+			}
+		}
+	}
+
+	e.errChan = make(chan error, len(commands))
+
+	e.wg.Add(len(commands))
+	for _, command := range commands {
 		go e.ProcessCommand(command, acc)
 	}
 	e.wg.Wait()
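Read as a standalone sketch (not literally the commit's code), the expansion step added to Gather above works like the helper below: the first space-separated token is globbed, each match gets the original arguments re-attached, and a pattern with no matches is kept verbatim so plain PATH commands such as `echo` still run. The name expandGlobs is hypothetical, and the explicit guard for commands without arguments is an addition here; the hunk itself indexes cmdAndArgs[1] directly.

```
// expandGlobs is a hypothetical helper mirroring the logic added to Gather.
// It assumes the "path/filepath" and "strings" imports added in this commit.
func expandGlobs(configured []string) ([]string, error) {
	commands := make([]string, 0, len(configured))
	for _, pattern := range configured {
		cmdAndArgs := strings.SplitN(pattern, " ", 2)

		matches, err := filepath.Glob(cmdAndArgs[0])
		if err != nil {
			return nil, err // malformed glob pattern
		}
		if len(matches) == 0 {
			// No matches: assume the command is on PATH and keep it as-is.
			commands = append(commands, pattern)
			continue
		}
		for _, match := range matches {
			if len(cmdAndArgs) > 1 {
				// Re-attach the original arguments to each matched file.
				commands = append(commands, match+" "+cmdAndArgs[1])
			} else {
				// Guard not present in the hunk: a glob with no arguments.
				commands = append(commands, match)
			}
		}
	}
	return commands, nil
}
```

The zero-match fallback is the design choice that preserves existing behaviour for absolute paths and bare PATH commands, which the new tests below exercise.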
@@ -169,3 +169,51 @@ func TestLineProtocolParseMultiple(t *testing.T) {
 		acc.AssertContainsTaggedFields(t, "cpu", fields, tags)
 	}
 }
+
+func TestExecCommandWithGlob(t *testing.T) {
+	parser, _ := parsers.NewValueParser("metric", "string", nil)
+	e := NewExec()
+	e.Commands = []string{"/bin/ech* metric_value"}
+	e.SetParser(parser)
+
+	var acc testutil.Accumulator
+	err := e.Gather(&acc)
+	require.NoError(t, err)
+
+	fields := map[string]interface{}{
+		"value": "metric_value",
+	}
+	acc.AssertContainsFields(t, "metric", fields)
+}
+
+func TestExecCommandWithoutGlob(t *testing.T) {
+	parser, _ := parsers.NewValueParser("metric", "string", nil)
+	e := NewExec()
+	e.Commands = []string{"/bin/echo metric_value"}
+	e.SetParser(parser)
+
+	var acc testutil.Accumulator
+	err := e.Gather(&acc)
+	require.NoError(t, err)
+
+	fields := map[string]interface{}{
+		"value": "metric_value",
+	}
+	acc.AssertContainsFields(t, "metric", fields)
+}
+
+func TestExecCommandWithoutGlobAndPath(t *testing.T) {
+	parser, _ := parsers.NewValueParser("metric", "string", nil)
+	e := NewExec()
+	e.Commands = []string{"echo metric_value"}
+	e.SetParser(parser)
+
+	var acc testutil.Accumulator
+	err := e.Gather(&acc)
+	require.NoError(t, err)
+
+	fields := map[string]interface{}{
+		"value": "metric_value",
+	}
+	acc.AssertContainsFields(t, "metric", fields)
+}
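The three new tests differ only in the configured command: a glob that matches /bin/echo, an absolute path without a glob, and a bare command resolved via PATH. A possible table-driven form, shown purely as a sketch (same package, imports, and helpers as the test file above, not part of this commit), would be:

```
func TestExecCommandGlobVariants(t *testing.T) {
	// One case per test added above.
	cases := []struct {
		name    string
		command string
	}{
		{"glob", "/bin/ech* metric_value"},
		{"absolute path", "/bin/echo metric_value"},
		{"path lookup", "echo metric_value"},
	}

	for _, tc := range cases {
		parser, _ := parsers.NewValueParser("metric", "string", nil)
		e := NewExec()
		e.Commands = []string{tc.command}
		e.SetParser(parser)

		var acc testutil.Accumulator
		require.NoError(t, e.Gather(&acc), tc.name)

		fields := map[string]interface{}{"value": "metric_value"}
		acc.AssertContainsFields(t, "metric", fields)
	}
}
```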