address some of Daniel's comments
parent 04f09d65bf
commit 8063b38b2d
@@ -726,32 +726,32 @@ HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG}

 #### Grok Configuration:
 ```toml
-[inputs.reader]
-## This is a list of patterns to check the given log file(s) for.
-## Note that adding patterns here increases processing time. The most
-## efficient configuration is to have one pattern per logparser.
-## Other common built-in patterns are:
-## %{COMMON_LOG_FORMAT} (plain apache & nginx access logs)
-## %{COMBINED_LOG_FORMAT} (access logs + referrer & agent)
-patterns = ["%{COMBINED_LOG_FORMAT}"]
+[[inputs.reader]]
+## This is a list of patterns to check the given log file(s) for.
+## Note that adding patterns here increases processing time. The most
+## efficient configuration is to have one pattern per logparser.
+## Other common built-in patterns are:
+## %{COMMON_LOG_FORMAT} (plain apache & nginx access logs)
+## %{COMBINED_LOG_FORMAT} (access logs + referrer & agent)
+grok_patterns = ["%{COMBINED_LOG_FORMAT}"]

-## Name of the outputted measurement name.
-name_override = "apache_access_log"
+## Name of the outputted measurement name.
+grok_name_override = "apache_access_log"

-## Full path(s) to custom pattern files.
-custom_pattern_files = []
+## Full path(s) to custom pattern files.
+grok_custom_pattern_files = []

-## Custom patterns can also be defined here. Put one pattern per line.
-custom_patterns = '''
+## Custom patterns can also be defined here. Put one pattern per line.
+grok_custom_patterns = '''

-## Timezone allows you to provide an override for timestamps that
-## don't already include an offset
-## e.g. 04/06/2016 12:41:45 data one two 5.43µs
-##
-## Default: "" which renders UTC
-## Options are as follows:
-## 1. Local -- interpret based on machine localtime
-## 2. "Canada/Eastern" -- Unix TZ values like those found in https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
-## 3. UTC -- or blank/unspecified, will return timestamp in UTC
-timezone = "Canada/Eastern"
-```
+## Timezone allows you to provide an override for timestamps that
+## don't already include an offset
+## e.g. 04/06/2016 12:41:45 data one two 5.43µs
+##
+## Default: "" which renders UTC
+## Options are as follows:
+## 1. Local -- interpret based on machine localtime
+## 2. "Canada/Eastern" -- Unix TZ values like those found in https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
+## 3. UTC -- or blank/unspecified, will return timestamp in UTC
+grok_timezone = "Canada/Eastern"
+```
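For context on what the renamed `grok_*` options feed into, here is a minimal, hypothetical sketch of driving the grok parser directly from Go with equivalent settings. The import path and the sample log line are assumptions; `MetricName`, `Patterns`, `Timezone`, `Compile`, and `Parse` are the fields and methods visible in the hunks below.

```go
package main

import (
	"fmt"
	"log"

	// Assumed location of the grok parser package touched by this change.
	"github.com/influxdata/telegraf/plugins/parsers/grok"
)

func main() {
	// Rough equivalent of grok_name_override, grok_patterns and grok_timezone above.
	p := grok.Parser{
		MetricName: "apache_access_log",
		Patterns:   []string{"%{COMBINED_LOG_FORMAT}"},
		Timezone:   "Canada/Eastern",
	}
	if err := p.Compile(); err != nil {
		log.Fatal(err)
	}

	line := `127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`
	metrics, err := p.Parse([]byte(line))
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range metrics {
		fmt.Println(m.Name(), m.Tags(), m.Fields(), m.Time())
	}
}
```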
@@ -1339,7 +1339,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
     }

     //for grok data_format
-    if node, ok := tbl.Fields["named_patterns"]; ok {
+    if node, ok := tbl.Fields["grok_named_patterns"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if ary, ok := kv.Value.(*ast.Array); ok {
                 for _, elem := range ary.Value {
@@ -1351,7 +1351,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
         }
     }

-    if node, ok := tbl.Fields["patterns"]; ok {
+    if node, ok := tbl.Fields["grok_patterns"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if ary, ok := kv.Value.(*ast.Array); ok {
                 for _, elem := range ary.Value {
@@ -1363,7 +1363,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
         }
     }

-    if node, ok := tbl.Fields["custom_patterns"]; ok {
+    if node, ok := tbl.Fields["grok_custom_patterns"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if str, ok := kv.Value.(*ast.String); ok {
                 c.CustomPatterns = str.Value
@@ -1371,7 +1371,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
         }
     }

-    if node, ok := tbl.Fields["custom_pattern_files"]; ok {
+    if node, ok := tbl.Fields["grok_custom_pattern_files"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if ary, ok := kv.Value.(*ast.Array); ok {
                 for _, elem := range ary.Value {
@@ -1383,7 +1383,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
         }
     }

-    if node, ok := tbl.Fields["timezone"]; ok {
+    if node, ok := tbl.Fields["grok_timezone"]; ok {
         if kv, ok := node.(*ast.KeyValue); ok {
             if str, ok := kv.Value.(*ast.String); ok {
                 c.TimeZone = str.Value
@@ -1406,11 +1406,11 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
     delete(tbl.Fields, "dropwizard_time_format")
     delete(tbl.Fields, "dropwizard_tags_path")
     delete(tbl.Fields, "dropwizard_tag_paths")
-    delete(tbl.Fields, "named_patterns")
-    delete(tbl.Fields, "patterns")
-    delete(tbl.Fields, "custom_patterns")
-    delete(tbl.Fields, "custom_pattern_files")
-    delete(tbl.Fields, "timezone")
+    delete(tbl.Fields, "grok_named_patterns")
+    delete(tbl.Fields, "grok_patterns")
+    delete(tbl.Fields, "grok_custom_patterns")
+    delete(tbl.Fields, "grok_custom_pattern_files")
+    delete(tbl.Fields, "grok_timezone")

     return parsers.NewParser(c)
 }
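The buildParser hunks repeat the same type-assertion dance for every renamed key: fetch the field, assert it is a key/value holding an array (or string), and collect the values. Purely as an illustration, a hypothetical helper capturing that pattern could look like the sketch below; it is not part of the commit, and the `ast` import path is assumed to be the toml package config.go already uses.

```go
package config

import "github.com/influxdata/toml/ast" // assumed import path

// stringArrayField mirrors the lookups above: find a table field by key,
// assert it is a key/value holding an array, and collect its string elements.
func stringArrayField(tbl *ast.Table, key string) []string {
	var out []string
	if node, ok := tbl.Fields[key]; ok {
		if kv, ok := node.(*ast.KeyValue); ok {
			if ary, ok := kv.Value.(*ast.Array); ok {
				for _, elem := range ary.Value {
					if str, ok := elem.(*ast.String); ok {
						out = append(out, str.Value)
					}
				}
			}
		}
	}
	return out
}
```

Each `grok_*` branch above could then assign the returned slice to the matching field on the parser config before the keys are deleted from `tbl.Fields`.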
@@ -1,8 +1,8 @@
 package reader

 import (
+    "fmt"
     "io/ioutil"
-    "log"

     "github.com/influxdata/telegraf"
     "github.com/influxdata/telegraf/internal/globpath"
@@ -50,14 +50,8 @@ func (r *Reader) Gather(acc telegraf.Accumulator) error {
             return err
         }

-        for i, m := range metrics {
-
-            //error if m is nil
-            if m == nil {
-                log.Printf("E! Metric could not be parsed from: %v, on line %v", k, i)
-                continue
-            }
-            acc.AddFields(m.Name(), m.Fields(), m.Tags())
+        for _, m := range metrics {
+            acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time())
         }
     }
     return nil
@@ -67,13 +61,12 @@ func (r *Reader) SetParser(p parsers.Parser) {
     r.parser = p
 }

-func (r *Reader) refreshFilePaths() {
+func (r *Reader) refreshFilePaths() error {
     var allFiles []string
     for _, filepath := range r.Filepaths {
         g, err := globpath.Compile(filepath)
         if err != nil {
-            log.Printf("E! Error Glob %s failed to compile, %s", filepath, err)
-            continue
+            return fmt.Errorf("E! Error Glob: %v could not be compiled, %s", filepath, err)
         }
         files := g.Match()

@@ -83,13 +76,13 @@ func (r *Reader) refreshFilePaths() {
     }

     r.Filenames = allFiles
+    return nil
 }

 //requires that Parser has been compiled
 func (r *Reader) readMetric(filename string) ([]telegraf.Metric, error) {
     fileContents, err := ioutil.ReadFile(filename)
     if err != nil {
-        log.Printf("E! File could not be opened: %v", filename)
+        return nil, fmt.Errorf("E! Error file: %v could not be read, %s", filename, err)
     }
     return r.parser.Parse(fileContents)
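Taken together, the reader.go hunks change the error-handling contract: glob and read failures now abort the gather instead of being logged and skipped, and the parsed timestamp is forwarded via `m.Time()`. A rough sketch of how Gather reads after the change; the `refreshFilePaths` call and the loop over `r.Filenames` are assumed from context, only the fragments shown in the hunks are certain.

```go
func (r *Reader) Gather(acc telegraf.Accumulator) error {
	// Assumed: re-expand the glob patterns first; errors now propagate.
	if err := r.refreshFilePaths(); err != nil {
		return err
	}
	for _, k := range r.Filenames {
		metrics, err := r.readMetric(k)
		if err != nil {
			return err
		}
		for _, m := range metrics {
			// The explicit m.Time() keeps the timestamp parsed from the
			// log line instead of stamping the metric at collection time.
			acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time())
		}
	}
	return nil
}
```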
@@ -71,7 +71,7 @@ type Parser struct {
     NamedPatterns []string
     CustomPatterns string
     CustomPatternFiles []string
-    Measurement string
+    MetricName string

     // Timezone is an optional component to help render log dates to
     // your chosen zone.
@@ -167,10 +167,6 @@ func (p *Parser) Compile() error {
         p.addCustomPatterns(scanner)
     }

-    if p.Measurement == "" {
-        p.Measurement = "logparser_grok"
-    }
-
     p.loc, err = time.LoadLocation(p.Timezone)
     if err != nil {
         log.Printf("W! improper timezone supplied (%s), setting loc to UTC", p.Timezone)
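The timezone handling Compile keeps relying on is plain `time.LoadLocation`. A small standard-library-only illustration of the documented behaviour: an empty string means UTC, and an invalid name falls back to UTC just like the `W!` branch above.

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// "Canada/Eastern" is an IANA zone name, as in the grok_timezone example.
	loc, err := time.LoadLocation("Canada/Eastern")
	if err != nil {
		loc = time.UTC // mirrors the parser's fallback when the name is invalid
	}
	// time.LoadLocation("") returns UTC, matching the documented default of "".
	fmt.Println(time.Date(2016, time.April, 6, 12, 41, 45, 0, loc))
}
```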
@@ -348,7 +344,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
         return nil, fmt.Errorf("logparser_grok: must have one or more fields")
     }

-    return metric.New(p.Measurement, tags, fields, p.tsModder.tsMod(timestamp))
+    return metric.New(p.MetricName, tags, fields, p.tsModder.tsMod(timestamp))
 }

 func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
@@ -9,8 +9,8 @@ import (

 func TestGrokParse(t *testing.T) {
     parser := Parser{
-        Measurement: "t_met",
-        Patterns: []string{"%{COMMON_LOG_FORMAT}"},
+        MetricName: "t_met",
+        Patterns: []string{"%{COMMON_LOG_FORMAT}"},
     }
     parser.Compile()
     metrics, err := parser.Parse([]byte(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`))
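A possible follow-up assertion for the renamed field, sketched below; it is not part of this change, and it assumes the stretchr/testify `require` helpers are available to the package and that one metric comes back for the single sample line.

```go
package grok

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestGrokParseMetricName(t *testing.T) {
	parser := Parser{
		MetricName: "t_met",
		Patterns:   []string{"%{COMMON_LOG_FORMAT}"},
	}
	require.NoError(t, parser.Compile())

	metrics, err := parser.Parse([]byte(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`))
	require.NoError(t, err)
	require.Len(t, metrics, 1)
	// The metric name should now come from MetricName rather than Measurement.
	require.Equal(t, "t_met", metrics[0].Name())
}
```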
@@ -148,7 +148,7 @@ func newGrokParser(metricName string,
     cPatterns string,
     cPatternFiles []string, tZone string) (Parser, error) {
     parser := grok.Parser{
-        Measurement: metricName,
+        MetricName: metricName,
         Patterns: patterns,
         NamedPatterns: nPatterns,
         CustomPatterns: cPatterns,
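For completeness, a hedged reconstruction of what newGrokParser plausibly does with these values: only the signature and the struct fields shown above are confirmed by the diff; the remaining fields and the Compile call are assumptions based on the parser API in the earlier hunks.

```go
func newGrokParser(metricName string,
	patterns []string, nPatterns []string,
	cPatterns string,
	cPatternFiles []string, tZone string) (Parser, error) {
	parser := grok.Parser{
		MetricName:         metricName,
		Patterns:           patterns,
		NamedPatterns:      nPatterns,
		CustomPatterns:     cPatterns,
		CustomPatternFiles: cPatternFiles,
		Timezone:           tZone,
	}
	// Compile resolves the patterns and timezone before the parser is handed out.
	err := parser.Compile()
	return &parser, err
}
```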