Add support for comma in logparser timestamp format (#4311)
parent 1bd41ef3ce
commit 61e197d254
@@ -108,7 +108,9 @@ You must capture at least one field per line.
   - ts-"CUSTOM"
 
 CUSTOM time layouts must be within quotes and be the representation of the
-"reference time", which is `Mon Jan 2 15:04:05 -0700 MST 2006`
+"reference time", which is `Mon Jan 2 15:04:05 -0700 MST 2006`.
+To match a comma decimal point you can use a period. For example `%{TIMESTAMP:timestamp:ts-"2006-01-02 15:04:05.000"}` can be used to match `"2018-01-02 15:04:05,000"`
+To match a comma decimal point you can use a period in the pattern string.
 See https://golang.org/pkg/time/#Parse for more details.
 
 Telegraf has many of its own [built-in patterns](./grok/patterns/influx-patterns),
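For readers unfamiliar with Go's layout convention referenced in the README text above: a CUSTOM layout is the reference time `Mon Jan 2 15:04:05 -0700 MST 2006` rewritten in the desired format. A minimal, standalone sketch (not part of this commit) of what the layout used in the new README example means:

package main

import (
	"fmt"
	"time"
)

func main() {
	// The layout is the reference time, Mon Jan 2 15:04:05 -0700 MST 2006,
	// written out in the target format (here with a period decimal point).
	layout := "2006-01-02 15:04:05.000"

	ts, err := time.Parse(layout, "2018-01-02 15:04:05.000")
	if err != nil {
		panic(err)
	}
	fmt.Println(ts) // 2018-01-02 15:04:05 +0000 UTC
}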
@@ -335,6 +335,9 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
 		case DROP:
 			// goodbye!
 		default:
+			// Replace commas with dot character
+			v = strings.Replace(v, ",", ".", -1)
+
 			ts, err := time.ParseInLocation(t, v, p.loc)
 			if err == nil {
 				timestamp = ts
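The substitution above is what lets a comma decimal separator match a period-based layout before the value reaches `time.ParseInLocation`. A minimal standalone sketch of the same two steps (the `loc` value below is illustrative only, not something the plugin configures):

package main

import (
	"fmt"
	"strings"
	"time"
)

func main() {
	layout := "2006-01-02 15:04:05.000"
	v := "2018-02-21 13:10:34,555"

	// Normalize the decimal separator first, as the parser change does.
	v = strings.Replace(v, ",", ".", -1)

	// Illustrative location; the layout carries no zone information,
	// so the timestamp is interpreted in loc.
	loc, err := time.LoadLocation("America/New_York")
	if err != nil {
		panic(err)
	}

	ts, err := time.ParseInLocation(layout, v, loc)
	fmt.Println(ts, err)
}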
@@ -982,3 +982,21 @@ func TestSyslogTimestampParser(t *testing.T) {
 	require.NotNil(t, m)
 	require.Equal(t, 2018, m.Time().Year())
 }
+
+func TestReplaceTimestampComma(t *testing.T) {
+
+	p := &Parser{
+		Patterns: []string{`%{TIMESTAMP_ISO8601:timestamp:ts-"2006-01-02 15:04:05.000"} successfulMatches=%{NUMBER:value:int}`},
+	}
+
+	require.NoError(t, p.Compile())
+	m, err := p.ParseLine("2018-02-21 13:10:34,555 successfulMatches=1")
+	require.NoError(t, err)
+	require.NotNil(t, m)
+
+	require.Equal(t, 2018, m.Time().Year())
+	require.Equal(t, 13, m.Time().Hour())
+	require.Equal(t, 34, m.Time().Second())
+	// Convert nanoseconds to milliseconds for comparison
+	require.Equal(t, 555, m.Time().Nanosecond()/1000000)
+}
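One small note on the final assertion: dividing `Nanosecond()` by 1000000 expresses the sub-second part in milliseconds. An equivalent way to write that conversion, shown here only as a sketch, uses the `time.Millisecond` constant:

package main

import (
	"fmt"
	"time"
)

func main() {
	// 13:10:34.555 built directly; only the fractional second matters here.
	ts := time.Date(2018, time.February, 21, 13, 10, 34, 555*int(time.Millisecond), time.UTC)

	// Same value as Nanosecond()/1000000 in the test above.
	fmt.Println(ts.Nanosecond() / int(time.Millisecond)) // 555
}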