Add millisecond unix time support to grok parser (#6476)
This commit is contained in:
parent
ddd79762ac
commit
b9a4ef7484
@@ -105,6 +105,7 @@ Patterns that convert all captures to tags will result in points that can't be w
 - ts-rfc3339nano ("2006-01-02T15:04:05.999999999Z07:00")
 - ts-httpd ("02/Jan/2006:15:04:05 -0700")
 - ts-epoch (seconds since unix epoch, may contain decimal)
+- ts-epochmilli (milliseconds since unix epoch)
 - ts-epochnano (nanoseconds since unix epoch)
 - ts-syslog ("Jan 02 15:04:05", parsed time is set to the current year)
 - ts-"CUSTOM"
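As a usage illustration (not part of this commit), the sketch below drives the new ts-epochmilli modifier through the grok parser's Go API, mirroring the unit test added further down in this diff. The import path is assumed from the Telegraf repository layout, and MYAPP is a made-up pattern name:

```go
package main

import (
	"fmt"
	"log"

	"github.com/influxdata/telegraf/plugins/parsers/grok"
)

func main() {
	// A custom pattern whose first capture is a millisecond unix timestamp;
	// the ts-epochmilli modifier turns it into the metric timestamp.
	p := &grok.Parser{
		Patterns: []string{"%{MYAPP}"},
		CustomPatterns: `
			MYAPP %{POSINT:ts:ts-epochmilli} response_time=%{POSINT:response_time:int}
		`,
	}
	if err := p.Compile(); err != nil {
		log.Fatal(err)
	}

	m, err := p.ParseLine("1568540909963 response_time=20821")
	if err != nil {
		log.Fatal(err)
	}
	// The millisecond epoch ends up as the metric time, not as a field.
	fmt.Println(m.Time().UTC(), m.Fields())
}
```

The timestamp capture is consumed as the metric time rather than emitted as a field, which is exactly what the new tests in this commit assert.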
@@ -48,7 +48,7 @@ func TestGrokParseLogFiles(t *testing.T) {
 		Log: testutil.Logger{},
 		GrokConfig: GrokConfig{
 			MeasurementName:    "logparser_grok",
-			Patterns:           []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
+			Patterns:           []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}", "%{TEST_LOG_C}"},
 			CustomPatternFiles: []string{thisdir + "testdata/test-patterns"},
 		},
 		FromBeginning: true,
@@ -162,6 +162,40 @@ func TestGrokParseLogFilesOneBad(t *testing.T) {
 		})
 }
 
+func TestGrokParseLogFiles_TimestampInEpochMilli(t *testing.T) {
+	thisdir := getCurrentDir()
+
+	logparser := &LogParserPlugin{
+		Log: testutil.Logger{},
+		GrokConfig: GrokConfig{
+			MeasurementName:    "logparser_grok",
+			Patterns:           []string{"%{TEST_LOG_C}"},
+			CustomPatternFiles: []string{thisdir + "testdata/test-patterns"},
+		},
+		FromBeginning: true,
+		Files:         []string{thisdir + "testdata/test_c.log"},
+	}
+
+	acc := testutil.Accumulator{}
+	acc.SetDebug(true)
+	assert.NoError(t, logparser.Start(&acc))
+	acc.Wait(1)
+
+	logparser.Stop()
+
+	acc.AssertContainsTaggedFields(t, "logparser_grok",
+		map[string]interface{}{
+			"clientip":      "192.168.1.1",
+			"myfloat":       float64(1.25),
+			"response_time": int64(5432),
+			"myint":         int64(101),
+		},
+		map[string]string{
+			"response_code": "200",
+			"path":          thisdir + "testdata/test_c.log",
+		})
+}
+
 func getCurrentDir() string {
 	_, filename, _, _ := runtime.Caller(1)
 	return strings.Replace(filename, "logparser_test.go", "", 1)
@@ -12,3 +12,7 @@ TEST_LOG_B \[%{TEST_TIMESTAMP:timestamp:ts-"02/01/2006--15:04:05"}\] %{NUMBER:my
 
 TEST_TIMESTAMP %{MONTHDAY}/%{MONTHNUM}/%{YEAR}--%{TIME}
 TEST_LOG_BAD \[%{TEST_TIMESTAMP:timestamp:ts-"02/01/2006--15:04:05"}\] %{NUMBER:myfloat:float} %{WORD:mystring:int} %{WORD:dropme:drop} %{WORD:nomodifier}
+
+# Test C log line:
+# 1568723594631 1.25 200 192.168.1.1 5.432µs 101
+TEST_LOG_C %{POSINT:timestamp:ts-epochmilli} %{NUMBER:myfloat:float} %{RESPONSE_CODE} %{IPORHOST:clientip} %{RESPONSE_TIME} %{NUMBER:myint:int}
@@ -0,0 +1 @@
+1568723594631 1.25 200 192.168.1.1 5.432µs 101
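The 5.432µs value in this fixture line is why the new logparser test above expects response_time to be int64(5432): grok duration captures are stored as integer nanoseconds. A standalone sketch of that conversion using only the standard library (not part of the commit):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// "5.432µs" is the response time in the test_c.log fixture line.
	d, err := time.ParseDuration("5.432µs")
	if err != nil {
		panic(err)
	}
	// As integer nanoseconds this is 5432, the value the new
	// logparser test expects for the response_time field.
	fmt.Println(d.Nanoseconds()) // 5432
}
```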
@@ -50,6 +50,7 @@ You must capture at least one field per line.
 - ts-httpd ("02/Jan/2006:15:04:05 -0700")
 - ts-epoch (seconds since unix epoch, may contain decimal)
 - ts-epochnano (nanoseconds since unix epoch)
+- ts-epochmilli (milliseconds since unix epoch)
 - ts-syslog ("Jan 02 15:04:05", parsed time is set to the current year)
 - ts-"CUSTOM"
 
@@ -28,12 +28,13 @@ var timeLayouts = map[string]string{
 	"ts-rfc3339":     "2006-01-02T15:04:05Z07:00",
 	"ts-rfc3339nano": "2006-01-02T15:04:05.999999999Z07:00",
 	"ts-httpd":       "02/Jan/2006:15:04:05 -0700",
-	// These three are not exactly "layouts", but they are special cases that
+	// These four are not exactly "layouts", but they are special cases that
 	// will get handled in the ParseLine function.
-	"ts-epoch":     "EPOCH",
-	"ts-epochnano": "EPOCH_NANO",
-	"ts-syslog":    "SYSLOG_TIMESTAMP",
-	"ts":           "GENERIC_TIMESTAMP", // try parsing all known timestamp layouts.
+	"ts-epoch":      "EPOCH",
+	"ts-epochnano":  "EPOCH_NANO",
+	"ts-epochmilli": "EPOCH_MILLI",
+	"ts-syslog":     "SYSLOG_TIMESTAMP",
+	"ts":            "GENERIC_TIMESTAMP", // try parsing all known timestamp layouts.
 }
 
 const (
@@ -45,6 +46,7 @@ const (
 	DURATION          = "duration"
 	DROP              = "drop"
 	EPOCH             = "EPOCH"
+	EPOCH_MILLI       = "EPOCH_MILLI"
 	EPOCH_NANO        = "EPOCH_NANO"
 	SYSLOG_TIMESTAMP  = "SYSLOG_TIMESTAMP"
 	GENERIC_TIMESTAMP = "GENERIC_TIMESTAMP"
@@ -297,6 +299,13 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
 				ts = ts.Add(time.Duration(nanosec) * time.Nanosecond)
 			}
 			timestamp = ts
+		case EPOCH_MILLI:
+			ms, err := strconv.ParseInt(v, 10, 64)
+			if err != nil {
+				log.Printf("E! Error parsing %s to int: %s", v, err)
+			} else {
+				timestamp = time.Unix(0, ms*int64(time.Millisecond))
+			}
 		case EPOCH_NANO:
 			iv, err := strconv.ParseInt(v, 10, 64)
 			if err != nil {
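To make the arithmetic in the EPOCH_MILLI branch concrete, here is a standalone sketch (not part of the commit) that applies the same expression to the timestamp from the test_c.log fixture; the time.UnixMilli comparison at the end is only an aside and needs Go 1.17 or newer:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Millisecond unix timestamp from the test_c.log fixture.
	const ms int64 = 1568723594631

	// time.Millisecond is 1e6 nanoseconds, so this multiplication turns a
	// millisecond count into the nanosecond offset that time.Unix expects.
	ts := time.Unix(0, ms*int64(time.Millisecond))
	fmt.Println(ts.UTC()) // 2019-09-17 12:33:14.631 +0000 UTC

	// Equivalent helper on Go 1.17+: time.UnixMilli(ms).
	fmt.Println(time.UnixMilli(ms).Equal(ts)) // true
}
```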
@@ -277,6 +277,28 @@ func TestParsePatternsWithoutCustom(t *testing.T) {
 	assert.Equal(t, time.Unix(0, 1466004605359052000), metricA.Time())
 }
 
+func TestParseEpochMilli(t *testing.T) {
+	p := &Parser{
+		Patterns: []string{"%{MYAPP}"},
+		CustomPatterns: `
+			MYAPP %{POSINT:ts:ts-epochmilli} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
+		`,
+	}
+	assert.NoError(t, p.Compile())
+
+	metricA, err := p.ParseLine(`1568540909963 response_time=20821 mymetric=10890.645`)
+	require.NotNil(t, metricA)
+	assert.NoError(t, err)
+	assert.Equal(t,
+		map[string]interface{}{
+			"response_time": int64(20821),
+			"metric":        float64(10890.645),
+		},
+		metricA.Fields())
+	assert.Equal(t, map[string]string{}, metricA.Tags())
+	assert.Equal(t, time.Unix(0, 1568540909963000000), metricA.Time())
+}
+
 func TestParseEpochNano(t *testing.T) {
 	p := &Parser{
 		Patterns: []string{"%{MYAPP}"},