Add polling method to logparser and tail inputs (#3213)
parent cb40972635
commit b06e2a0c3d

plugins/inputs/logparser/README.md
@@ -21,6 +21,9 @@ regex patterns.
   ## be read from the beginning.
   from_beginning = false
 
+  ## Method used to watch for file updates. Can be either "inotify" or "poll".
+  # watch_method = "inotify"
+
   ## Parse logstash-style "grok" patterns:
   ## Telegraf built-in parsing patterns: https://goo.gl/dkay10
   [inputs.logparser.grok]

plugins/inputs/logparser/logparser.go
@@ -19,6 +19,10 @@ import (
 	"github.com/influxdata/telegraf/plugins/inputs/logparser/grok"
 )
 
+const (
+	defaultWatchMethod = "inotify"
+)
+
 // LogParser in the primary interface for the plugin
 type LogParser interface {
 	ParseLine(line string) (telegraf.Metric, error)
@@ -34,6 +38,7 @@ type logEntry struct {
 type LogParserPlugin struct {
 	Files         []string
 	FromBeginning bool
+	WatchMethod   string
 
 	tailers map[string]*tail.Tail
 	lines   chan logEntry
@@ -61,6 +66,9 @@ const sampleConfig = `
   ## be read from the beginning.
   from_beginning = false
 
+  ## Method used to watch for file updates. Can be either "inotify" or "poll".
+  # watch_method = "inotify"
+
   ## Parse logstash-style "grok" patterns:
   ## Telegraf built-in parsing patterns: https://goo.gl/dkay10
   [inputs.logparser.grok]
@@ -167,6 +175,11 @@ func (l *LogParserPlugin) tailNewfiles(fromBeginning bool) error {
 		seek.Offset = 0
 	}
 
+	var poll bool
+	if l.WatchMethod == "poll" {
+		poll = true
+	}
+
 	// Create a "tailer" for each file
 	for _, filepath := range l.Files {
 		g, err := globpath.Compile(filepath)
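
Side note on the hunk above, not part of the diff: any watch_method value other than "poll" (including a typo) silently falls back to inotify behavior. A stricter variant, purely hypothetical and not what this commit does, could centralize the mapping and reject unknown values:

package logparser

import "fmt"

// Hypothetical helper, not part of this commit: translate the configured
// watch_method into the tail library's Poll flag, erroring on unknown values
// instead of silently falling back to inotify.
func watchMethodToPoll(method string) (bool, error) {
	switch method {
	case "", "inotify":
		return false, nil
	case "poll":
		return true, nil
	default:
		return false, fmt.Errorf("invalid watch_method: %q", method)
	}
}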
@@ -188,6 +201,7 @@ func (l *LogParserPlugin) tailNewfiles(fromBeginning bool) error {
 			Follow:    true,
 			Location:  &seek,
 			MustExist: true,
+			Poll:      poll,
 			Logger:    tail.DiscardingLogger,
 		})
 		if err != nil {
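
For context on what the new Poll field does downstream (not part of the diff): in the tail library Telegraf vendors (the influxdata fork of hpcloud/tail, judging by tail.DiscardingLogger above), Poll switches the watcher from inotify events to periodic polling of the file, which is useful on NFS and other filesystems where inotify notifications never arrive. A minimal standalone sketch, assuming that fork's TailFile/Config API and an illustrative file path:

package main

import (
	"fmt"
	"log"

	"github.com/influxdata/tail" // assumption: the fork Telegraf vendors
)

func main() {
	// Poll: true makes the tailer stat the file on an interval instead of
	// waiting for inotify events.
	t, err := tail.TailFile("/var/log/example.log", tail.Config{
		Follow:   true,
		ReOpen:   true,
		Poll:     true,
		Location: &tail.SeekInfo{Whence: 2}, // 2 == seek to end of file
		Logger:   tail.DiscardingLogger,
	})
	if err != nil {
		log.Fatal(err)
	}
	for line := range t.Lines {
		fmt.Println(line.Text)
	}
}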
@@ -285,6 +299,8 @@ func (l *LogParserPlugin) Stop() {
 
 func init() {
 	inputs.Add("logparser", func() telegraf.Input {
-		return &LogParserPlugin{}
+		return &LogParserPlugin{
+			WatchMethod: defaultWatchMethod,
+		}
 	})
 }
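
A quick way to exercise the changed init() above, as a hypothetical test that is not in this commit and that assumes the plugins/inputs registry exposes its Inputs map: pull the registered factory back out and check the seeded default.

package logparser

import (
	"testing"

	"github.com/influxdata/telegraf/plugins/inputs"
)

// Hypothetical test, not part of this commit: the factory registered in
// init() should seed WatchMethod with defaultWatchMethod ("inotify").
func TestWatchMethodDefaultsToInotify(t *testing.T) {
	creator, ok := inputs.Inputs["logparser"]
	if !ok {
		t.Fatal("logparser input is not registered")
	}
	plugin := creator().(*LogParserPlugin)
	if plugin.WatchMethod != defaultWatchMethod {
		t.Fatalf("expected watch_method %q, got %q", defaultWatchMethod, plugin.WatchMethod)
	}
}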

plugins/inputs/tail/README.md
@@ -39,6 +39,9 @@ The plugin expects messages in one of the
   ## Whether file is a named pipe
   pipe = false
 
+  ## Method used to watch for file updates. Can be either "inotify" or "poll".
+  # watch_method = "inotify"
+
   ## Data format to consume.
   ## Each data format has its own unique set of configuration options, read
   ## more about them here:

plugins/inputs/tail/tail.go
@@ -15,10 +15,15 @@ import (
 	"github.com/influxdata/telegraf/plugins/parsers"
 )
 
+const (
+	defaultWatchMethod = "inotify"
+)
+
 type Tail struct {
 	Files         []string
 	FromBeginning bool
 	Pipe          bool
+	WatchMethod   string
 
 	tailers []*tail.Tail
 	parser  parsers.Parser
@@ -50,6 +55,9 @@ const sampleConfig = `
   ## Whether file is a named pipe
   pipe = false
 
+  ## Method used to watch for file updates. Can be either "inotify" or "poll".
+  # watch_method = "inotify"
+
   ## Data format to consume.
   ## Each data format has its own unique set of configuration options, read
   ## more about them here:
@@ -83,6 +91,11 @@ func (t *Tail) Start(acc telegraf.Accumulator) error {
 		}
 	}
 
+	var poll bool
+	if t.WatchMethod == "poll" {
+		poll = true
+	}
+
 	// Create a "tailer" for each file
 	for _, filepath := range t.Files {
 		g, err := globpath.Compile(filepath)
@@ -96,6 +109,7 @@ func (t *Tail) Start(acc telegraf.Accumulator) error {
 			Follow:    true,
 			Location:  seek,
 			MustExist: true,
+			Poll:      poll,
 			Pipe:      t.Pipe,
 			Logger:    tail.DiscardingLogger,
 		})
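
End to end, the same option on the tail input can be exercised directly from Go. A hypothetical test-style sketch, not part of this commit, assuming Telegraf's testutil.Accumulator and parsers.NewInfluxParser helpers behave as they do elsewhere in the repository:

package tail

import (
	"io/ioutil"
	"os"
	"testing"

	"github.com/influxdata/telegraf/plugins/parsers"
	"github.com/influxdata/telegraf/testutil"
)

// Hypothetical sketch, not part of this commit: start the tail input with the
// polling watcher, the programmatic equivalent of watch_method = "poll".
func TestTailStartsWithPollWatcher(t *testing.T) {
	tmpfile, err := ioutil.TempFile("", "tail_poll_test")
	if err != nil {
		t.Fatal(err)
	}
	defer os.Remove(tmpfile.Name())

	tt := &Tail{
		Files:         []string{tmpfile.Name()},
		FromBeginning: true,
		WatchMethod:   "poll",
	}
	p, err := parsers.NewInfluxParser()
	if err != nil {
		t.Fatal(err)
	}
	tt.SetParser(p)

	acc := testutil.Accumulator{}
	if err := tt.Start(&acc); err != nil {
		t.Fatal(err)
	}
	defer tt.Stop()
}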