address some of Daniel's comments

Max U 2018-07-03 11:29:11 -07:00
parent 04f09d65bf
commit 8063b38b2d
6 changed files with 47 additions and 58 deletions

View File

@@ -726,32 +726,32 @@ HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG}
 #### Grok Configuration:
 ```toml
-[inputs.reader]
+[[inputs.reader]]
   ## This is a list of patterns to check the given log file(s) for.
   ## Note that adding patterns here increases processing time. The most
   ## efficient configuration is to have one pattern per logparser.
   ## Other common built-in patterns are:
   ##   %{COMMON_LOG_FORMAT}   (plain apache & nginx access logs)
   ##   %{COMBINED_LOG_FORMAT} (access logs + referrer & agent)
-  patterns = ["%{COMBINED_LOG_FORMAT}"]
+  grok_patterns = ["%{COMBINED_LOG_FORMAT}"]
 
   ## Name of the outputted measurement name.
-  name_override = "apache_access_log"
+  grok_name_override = "apache_access_log"
 
   ## Full path(s) to custom pattern files.
-  custom_pattern_files = []
+  grok_custom_pattern_files = []
 
   ## Custom patterns can also be defined here. Put one pattern per line.
-  custom_patterns = '''
+  grok_custom_patterns = '''
 
   ## Timezone allows you to provide an override for timestamps that
   ## don't already include an offset
   ## e.g. 04/06/2016 12:41:45 data one two 5.43µs
   ##
   ## Default: "" which renders UTC
   ## Options are as follows:
   ##   1. Local             -- interpret based on machine localtime
   ##   2. "Canada/Eastern"  -- Unix TZ values like those found in https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
   ##   3. UTC               -- or blank/unspecified, will return timestamp in UTC
-  timezone = "Canada/Eastern"
+  grok_timezone = "Canada/Eastern"
 ```

View File

@@ -1339,7 +1339,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
     }
 
     //for grok data_format
-    if node, ok := tbl.Fields["named_patterns"]; ok {
+    if node, ok := tbl.Fields["grok_named_patterns"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if ary, ok := kv.Value.(*ast.Array); ok {
                 for _, elem := range ary.Value {
@@ -1351,7 +1351,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
         }
     }
 
-    if node, ok := tbl.Fields["patterns"]; ok {
+    if node, ok := tbl.Fields["grok_patterns"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if ary, ok := kv.Value.(*ast.Array); ok {
                 for _, elem := range ary.Value {
@@ -1363,7 +1363,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
         }
     }
 
-    if node, ok := tbl.Fields["custom_patterns"]; ok {
+    if node, ok := tbl.Fields["grok_custom_patterns"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if str, ok := kv.Value.(*ast.String); ok {
                 c.CustomPatterns = str.Value
@@ -1371,7 +1371,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
         }
     }
 
-    if node, ok := tbl.Fields["custom_pattern_files"]; ok {
+    if node, ok := tbl.Fields["grok_custom_pattern_files"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if ary, ok := kv.Value.(*ast.Array); ok {
                 for _, elem := range ary.Value {
@@ -1383,7 +1383,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
         }
     }
 
-    if node, ok := tbl.Fields["timezone"]; ok {
+    if node, ok := tbl.Fields["grok_timezone"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if str, ok := kv.Value.(*ast.String); ok {
                 c.TimeZone = str.Value
@@ -1406,11 +1406,11 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
     delete(tbl.Fields, "dropwizard_time_format")
     delete(tbl.Fields, "dropwizard_tags_path")
     delete(tbl.Fields, "dropwizard_tag_paths")
-    delete(tbl.Fields, "named_patterns")
-    delete(tbl.Fields, "patterns")
-    delete(tbl.Fields, "custom_patterns")
-    delete(tbl.Fields, "custom_pattern_files")
-    delete(tbl.Fields, "timezone")
+    delete(tbl.Fields, "grok_named_patterns")
+    delete(tbl.Fields, "grok_patterns")
+    delete(tbl.Fields, "grok_custom_patterns")
+    delete(tbl.Fields, "grok_custom_pattern_files")
+    delete(tbl.Fields, "grok_timezone")
 
     return parsers.NewParser(c)
 }
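Each renamed grok_* key in buildParser is read with the same chain of TOML AST type assertions and then dropped from the table by the delete calls above. A minimal sketch of that repeated pattern, pulled out as a standalone helper (the helper name is hypothetical; the ast types are assumed to be the github.com/influxdata/toml/ast ones this file already works with):

```go
package config

import "github.com/influxdata/toml/ast"

// extractStringSlice collects every string element of a TOML array field such
// as "grok_patterns". Hypothetical helper; buildParser above inlines this same
// assertion chain once per grok_* option.
func extractStringSlice(tbl *ast.Table, key string) []string {
    var out []string
    if node, ok := tbl.Fields[key]; ok {
        if kv, ok := node.(*ast.KeyValue); ok {
            if ary, ok := kv.Value.(*ast.Array); ok {
                for _, elem := range ary.Value {
                    if str, ok := elem.(*ast.String); ok {
                        out = append(out, str.Value)
                    }
                }
            }
        }
    }
    return out
}
```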

View File

@@ -1,8 +1,8 @@
 package reader
 
 import (
+    "fmt"
     "io/ioutil"
-    "log"
 
     "github.com/influxdata/telegraf"
     "github.com/influxdata/telegraf/internal/globpath"
@@ -50,14 +50,8 @@ func (r *Reader) Gather(acc telegraf.Accumulator) error {
             return err
         }
 
-        for i, m := range metrics {
-            //error if m is nil
-            if m == nil {
-                log.Printf("E! Metric could not be parsed from: %v, on line %v", k, i)
-                continue
-            }
-            acc.AddFields(m.Name(), m.Fields(), m.Tags())
+        for _, m := range metrics {
+            acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time())
         }
     }
     return nil
@@ -67,13 +61,12 @@ func (r *Reader) SetParser(p parsers.Parser) {
     r.parser = p
 }
 
-func (r *Reader) refreshFilePaths() {
+func (r *Reader) refreshFilePaths() error {
     var allFiles []string
     for _, filepath := range r.Filepaths {
         g, err := globpath.Compile(filepath)
         if err != nil {
-            log.Printf("E! Error Glob %s failed to compile, %s", filepath, err)
-            continue
+            return fmt.Errorf("E! Error Glob: %v could not be compiled, %s", filepath, err)
         }
 
         files := g.Match()
@@ -83,13 +76,13 @@ func (r *Reader) refreshFilePaths() {
     }
 
     r.Filenames = allFiles
+    return nil
 }
 
-//requires that Parser has been compiled
 func (r *Reader) readMetric(filename string) ([]telegraf.Metric, error) {
     fileContents, err := ioutil.ReadFile(filename)
     if err != nil {
-        log.Printf("E! File could not be opened: %v", filename)
+        return nil, fmt.Errorf("E! Error file: %v could not be read, %s", filename, err)
     }
 
     return r.parser.Parse(fileContents)
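With refreshFilePaths returning an error and readMetric wrapping read failures, the reader can propagate problems to the caller instead of logging and skipping them. A rough sketch of the resulting gather path, assembled from the names visible in this diff rather than the literal file contents:

```go
// Sketch of the gather flow after this change; imports are the ones shown in
// the hunk above ("fmt", "io/ioutil", telegraf, globpath).
func (r *Reader) Gather(acc telegraf.Accumulator) error {
    if err := r.refreshFilePaths(); err != nil {
        return err // glob compile failures now surface here
    }
    for _, k := range r.Filenames {
        metrics, err := r.readMetric(k) // wraps ioutil.ReadFile errors with the filename
        if err != nil {
            return err
        }
        for _, m := range metrics {
            // keep each metric's own timestamp instead of the collection time
            acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time())
        }
    }
    return nil
}
```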

View File

@@ -71,7 +71,7 @@ type Parser struct {
     NamedPatterns      []string
     CustomPatterns     string
     CustomPatternFiles []string
-    Measurement        string
+    MetricName         string
 
     // Timezone is an optional component to help render log dates to
     // your chosen zone.
@@ -167,10 +167,6 @@ func (p *Parser) Compile() error {
         p.addCustomPatterns(scanner)
     }
 
-    if p.Measurement == "" {
-        p.Measurement = "logparser_grok"
-    }
-
     p.loc, err = time.LoadLocation(p.Timezone)
     if err != nil {
         log.Printf("W! improper timezone supplied (%s), setting loc to UTC", p.Timezone)
@@ -348,7 +344,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
         return nil, fmt.Errorf("logparser_grok: must have one or more fields")
     }
 
-    return metric.New(p.Measurement, tags, fields, p.tsModder.tsMod(timestamp))
+    return metric.New(p.MetricName, tags, fields, p.tsModder.tsMod(timestamp))
 }
 
 func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {

View File

@@ -9,8 +9,8 @@ import (
 
 func TestGrokParse(t *testing.T) {
     parser := Parser{
-        Measurement: "t_met",
-        Patterns:    []string{"%{COMMON_LOG_FORMAT}"},
+        MetricName: "t_met",
+        Patterns:   []string{"%{COMMON_LOG_FORMAT}"},
     }
     parser.Compile()
     metrics, err := parser.Parse([]byte(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`))
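The renamed field is also what single-line parsing reports as the measurement name, and Compile no longer falls back to "logparser_grok" when it is unset. A minimal usage sketch, assuming the parser ends up importable as github.com/influxdata/telegraf/plugins/parsers/grok and using an illustrative pattern and log line:

```go
package main

import (
    "fmt"
    "log"

    "github.com/influxdata/telegraf/plugins/parsers/grok"
)

func main() {
    p := grok.Parser{
        MetricName: "apache_access_log",              // formerly Measurement; no default is applied anymore
        Patterns:   []string{"%{COMMON_LOG_FORMAT}"}, // built-in pattern listed in the README above
    }
    if err := p.Compile(); err != nil {
        log.Fatal(err)
    }

    m, err := p.ParseLine(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
    if err != nil {
        log.Fatal(err)
    }
    if m == nil {
        log.Fatal("line did not match any pattern")
    }
    fmt.Println(m.Name(), m.Tags(), m.Fields())
}
```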

View File

@@ -148,7 +148,7 @@ func newGrokParser(metricName string,
     cPatterns string,
     cPatternFiles []string, tZone string) (Parser, error) {
     parser := grok.Parser{
-        Measurement:        metricName,
+        MetricName:         metricName,
         Patterns:           patterns,
         NamedPatterns:      nPatterns,
        CustomPatterns:     cPatterns,