From e1e6a08f39667b80ddcc1c2b86d690d6638f351a Mon Sep 17 00:00:00 2001 From: Max U Date: Mon, 25 Jun 2018 15:32:27 -0700 Subject: [PATCH] add grok as a top level parser, still need README --- internal/config/config.go | 58 ++++++++++++++++++++++ plugins/inputs/all/all.go | 2 +- plugins/inputs/reader/reader.go | 73 ++-------------------------- plugins/inputs/reader/reader_test.go | 26 ++++++++-- 4 files changed, 84 insertions(+), 75 deletions(-) diff --git a/internal/config/config.go b/internal/config/config.go index 8a31c271e..1a98c61c5 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -1338,6 +1338,59 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) { } } + //for grok data_format + if node, ok := tbl.Fields["named_patterns"]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if ary, ok := kv.Value.(*ast.Array); ok { + for _, elem := range ary.Value { + if str, ok := elem.(*ast.String); ok { + c.NamedPatterns = append(c.NamedPatterns, str.Value) + } + } + } + } + } + + if node, ok := tbl.Fields["patterns"]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if ary, ok := kv.Value.(*ast.Array); ok { + for _, elem := range ary.Value { + if str, ok := elem.(*ast.String); ok { + c.Patterns = append(c.Patterns, str.Value) + } + } + } + } + } + + if node, ok := tbl.Fields["custom_patterns"]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if str, ok := kv.Value.(*ast.String); ok { + c.CustomPatterns = str.Value + } + } + } + + if node, ok := tbl.Fields["custom_pattern_files"]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if ary, ok := kv.Value.(*ast.Array); ok { + for _, elem := range ary.Value { + if str, ok := elem.(*ast.String); ok { + c.CustomPatternFiles = append(c.CustomPatternFiles, str.Value) + } + } + } + } + } + + if node, ok := tbl.Fields["timezone"]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if str, ok := kv.Value.(*ast.String); ok { + c.TimeZone = str.Value + } + } + } + c.MetricName = name 
delete(tbl.Fields, "data_format") @@ -1353,6 +1406,11 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) { delete(tbl.Fields, "dropwizard_time_format") delete(tbl.Fields, "dropwizard_tags_path") delete(tbl.Fields, "dropwizard_tag_paths") + delete(tbl.Fields, "named_patterns") + delete(tbl.Fields, "patterns") + delete(tbl.Fields, "custom_patterns") + delete(tbl.Fields, "custom_pattern_files") + delete(tbl.Fields, "timezone") return parsers.NewParser(c) } diff --git a/plugins/inputs/all/all.go b/plugins/inputs/all/all.go index b2be2be5a..de34847d6 100644 --- a/plugins/inputs/all/all.go +++ b/plugins/inputs/all/all.go @@ -85,7 +85,8 @@ import ( _ "github.com/influxdata/telegraf/plugins/inputs/puppetagent" _ "github.com/influxdata/telegraf/plugins/inputs/rabbitmq" _ "github.com/influxdata/telegraf/plugins/inputs/raindrops" + _ "github.com/influxdata/telegraf/plugins/inputs/reader" _ "github.com/influxdata/telegraf/plugins/inputs/redis" _ "github.com/influxdata/telegraf/plugins/inputs/rethinkdb" _ "github.com/influxdata/telegraf/plugins/inputs/riak" _ "github.com/influxdata/telegraf/plugins/inputs/salesforce" diff --git a/plugins/inputs/reader/reader.go b/plugins/inputs/reader/reader.go index 853405745..bfccb87d3 100644 --- a/plugins/inputs/reader/reader.go +++ b/plugins/inputs/reader/reader.go @@ -13,19 +13,9 @@ import ( type Reader struct { Filepaths []string `toml:"files"` FromBeginning bool - DataFormat string `toml:"data_format"` - ParserConfig parsers.Config - Parser parsers.Parser - Tags []string + parser parsers.Parser Filenames []string - - //for grok parser - Patterns []string - namedPatterns []string - CustomPatterns string - CustomPatternFiles []string - TZone string } const sampleConfig = `## Files to parse. 
@@ -41,38 +31,6 @@ files = ["/var/log/apache/access.log"] ## more about them here: ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md data_format = "" - -## Parse logstash-style "grok" patterns: -## Telegraf built-in parsing patterns: https://goo.gl/dkay10 -[inputs.logparser.grok] - ## This is a list of patterns to check the given log file(s) for. - ## Note that adding patterns here increases processing time. The most - ## efficient configuration is to have one pattern per logparser. - ## Other common built-in patterns are: - ## %{COMMON_LOG_FORMAT} (plain apache & nginx access logs) - ## %{COMBINED_LOG_FORMAT} (access logs + referrer & agent) - patterns = ["%{COMBINED_LOG_FORMAT}"] - - ## Name of the outputted measurement name. - measurement = "apache_access_log" - - ## Full path(s) to custom pattern files. - custom_pattern_files = [] - - ## Custom patterns can also be defined here. Put one pattern per line. - custom_patterns = ''' - ''' - - ## Timezone allows you to provide an override for timestamps that - ## don't already include an offset - ## e.g. 04/06/2016 12:41:45 data one two 5.43µs - ## - ## Default: "" which renders UTC - ## Options are as follows: - ## 1. Local -- interpret based on machine localtime - ## 2. "Canada/Eastern" -- Unix TZ values like those found in https://en.wikipedia.org/wiki/List_of_tz_database_time_zones - ## 3. UTC -- or blank/unspecified, will return timestamp in UTC - timezone = "Canada/Eastern" ` // SampleConfig returns the default configuration of the Input @@ -100,31 +58,7 @@ func (r *Reader) Gather(acc telegraf.Accumulator) error { } func (r *Reader) SetParser(p parsers.Parser) { - r.Parser = p -} - -func (r *Reader) compileParser() { - if r.DataFormat == "" { - log.Printf("E! 
No data_format specified") return } r.ParserConfig = parsers.Config{ DataFormat: r.DataFormat, TagKeys: r.Tags, //grok settings Patterns: r.Patterns, NamedPatterns: r.namedPatterns, CustomPatterns: r.CustomPatterns, CustomPatternFiles: r.CustomPatternFiles, TimeZone: r.TZone, } nParser, err := parsers.NewParser(&r.ParserConfig) if err != nil { log.Printf("E! Error building parser: %v", err) } - r.Parser = nParser + r.parser = p } func (r *Reader) refreshFilePaths() { @@ -151,8 +85,7 @@ func (r *Reader) readMetric(filename string) ([]telegraf.Metric, error) { if err != nil { log.Printf("E! File could not be opened: %v", filename) } - - return r.Parser.Parse(fileContents) + return r.parser.Parse(fileContents) } diff --git a/plugins/inputs/reader/reader_test.go b/plugins/inputs/reader/reader_test.go index cb6fb44a8..dcfa9ffc3 100644 --- a/plugins/inputs/reader/reader_test.go +++ b/plugins/inputs/reader/reader_test.go @@ -6,6 +6,7 @@ import ( "strings" "testing" + "github.com/influxdata/telegraf/plugins/parsers" "github.com/influxdata/telegraf/testutil" "github.com/stretchr/testify/assert" ) @@ -28,7 +29,12 @@ func TestJSONParserCompile(t *testing.T) { - DataFormat: "json", - Tags: []string{"parent_ignored_child"}, } - r.compileParser() + parserConfig := parsers.Config{ + DataFormat: "json", + TagKeys: []string{"parent_ignored_child"}, + } + nParser, err := parsers.NewParser(&parserConfig) + r.parser = nParser + assert.NoError(t, err) + r.Gather(&acc) log.Printf("acc: %v", acc.Metrics[0].Tags) assert.Equal(t, map[string]string{"parent_ignored_child": "hi"}, acc.Metrics[0].Tags) @@ -41,15 +49,22 @@ func TestGrokParser(t *testing.T) { r := Reader{ Filepaths: []string{testDir + "/reader/testfiles/grok_a.log"}, - DataFormat: "grok", - Patterns: []string{"%{COMMON_LOG_FORMAT}"}, } - r.compileParser() - err := r.Gather(&acc) + parserConfig := parsers.Config{ + DataFormat: "grok", + Patterns: []string{"%{COMMON_LOG_FORMAT}"}, + } + + 
nParser, err := parsers.NewParser(&parserConfig) + r.parser = nParser + assert.NoError(t, err) + + log.Printf("path: %v", r.Filepaths[0]) + err = r.Gather(&acc) log.Printf("err: %v", err) log.Printf("metric[0]_tags: %v, metric[0]_fields: %v", acc.Metrics[0].Tags, acc.Metrics[0].Fields) log.Printf("metric[1]_tags: %v, metric[1]_fields: %v", acc.Metrics[1].Tags, acc.Metrics[1].Fields) assert.Equal(t, 2, len(acc.Metrics)) } func getPluginDir() string {