Fix parse of unix timestamp with more than ns precision (#5826)
This commit is contained in:
parent 3e0efdac39
commit e52f7056ba
@@ -348,25 +348,18 @@ func ParseTimestamp(timestamp interface{}, format string) (time.Time, error) {
 // format = "unix_ns": epoch is assumed to be in nanoseconds and can come as number or string. Cannot have a decimal part.
 func ParseTimestampWithLocation(timestamp interface{}, format string, location string) (time.Time, error) {
 	timeInt, timeFractional := int64(0), int64(0)
-	timeEpochStr, ok := timestamp.(string)
-	var err error
 
-	if !ok {
-		timeEpochFloat, ok := timestamp.(float64)
-		if !ok {
-			return time.Time{}, fmt.Errorf("time: %v could not be converted to string nor float64", timestamp)
-		}
-		intPart, frac := math.Modf(timeEpochFloat)
-		timeInt, timeFractional = int64(intPart), int64(frac*1e9)
-	} else {
-		splitted := regexp.MustCompile("[.,]").Split(timeEpochStr, 2)
+	switch ts := timestamp.(type) {
+	case string:
+		var err error
+		splitted := regexp.MustCompile("[.,]").Split(ts, 2)
 		timeInt, err = strconv.ParseInt(splitted[0], 10, 64)
 		if err != nil {
 			loc, err := time.LoadLocation(location)
 			if err != nil {
 				return time.Time{}, fmt.Errorf("location: %s could not be loaded as a location", location)
 			}
-			return time.ParseInLocation(format, timeEpochStr, loc)
+			return time.ParseInLocation(format, ts, loc)
 		}
 
 		if len(splitted) == 2 {
@@ -380,7 +373,15 @@ func ParseTimestampWithLocation(timestamp interface{}, format string, location string) (time.Time, error) {
 				return time.Time{}, err
 			}
 		}
+	case int64:
+		timeInt = ts
+	case float64:
+		intPart, frac := math.Modf(ts)
+		timeInt, timeFractional = int64(intPart), int64(frac*1e9)
+	default:
+		return time.Time{}, fmt.Errorf("time: %v could not be converted to string nor float64", timestamp)
 	}
 
 	if strings.EqualFold(format, "unix") {
 		return time.Unix(timeInt, timeFractional).UTC(), nil
 	} else if strings.EqualFold(format, "unix_ms") {
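Taken together, the two hunks above replace the float64-first type assertion in ParseTimestampWithLocation with a type switch: a string epoch is split on the decimal separator and parsed piecewise, and only genuine int64/float64 inputs go through math.Modf. The standalone sketch below is not Telegraf code (the helper name splitEpoch is invented for illustration); it shows the precision loss that motivates the change. A unix timestamp with nanosecond or finer resolution already spends ten of float64's roughly fifteen to sixteen significant decimal digits on the seconds, so round-tripping through float64 keeps only about six or seven fractional digits.

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// splitEpoch mirrors the string branch of the type switch above: the integer
// seconds stay exact and the fractional digits are kept as text instead of
// being squeezed through float64.
func splitEpoch(ts string) (sec int64, frac string, err error) {
	parts := regexp.MustCompile("[.,]").Split(ts, 2)
	sec, err = strconv.ParseInt(parts[0], 10, 64)
	if err != nil {
		return 0, "", err
	}
	if len(parts) == 2 {
		frac = parts[1]
	}
	return sec, frac, nil
}

func main() {
	in := "1551129661.95456123352050781250"

	// Old-style handling: convert to float64 first. The seconds alone use
	// ten significant digits, so only a handful of fractional digits survive.
	f, _ := strconv.ParseFloat(in, 64)
	fmt.Printf("via float64: %.20f\n", f)

	// New-style handling: split the string, keeping every fractional digit
	// available for a later nanosecond conversion.
	sec, frac, _ := splitEpoch(in)
	fmt.Println("seconds:", sec, "fraction:", frac)
}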
@@ -225,29 +225,25 @@ outer:
 // to the format.
 func parseTimestamp(timeFunc func() time.Time, recordFields map[string]interface{},
 	timestampColumn, timestampFormat string,
-) (metricTime time.Time, err error) {
-	metricTime = timeFunc()
+) (time.Time, error) {
 
 	if timestampColumn != "" {
 		if recordFields[timestampColumn] == nil {
-			err = fmt.Errorf("timestamp column: %v could not be found", timestampColumn)
-			return
+			return time.Time{}, fmt.Errorf("timestamp column: %v could not be found", timestampColumn)
 		}
 
-		tStr := fmt.Sprintf("%v", recordFields[timestampColumn])
-
 		switch timestampFormat {
 		case "":
-			err = fmt.Errorf("timestamp format must be specified")
-			return
+			return time.Time{}, fmt.Errorf("timestamp format must be specified")
 		default:
-			metricTime, err = internal.ParseTimestamp(tStr, timestampFormat)
+			metricTime, err := internal.ParseTimestamp(recordFields[timestampColumn], timestampFormat)
 			if err != nil {
-				return
+				return time.Time{}, err
 			}
+			return metricTime, err
 		}
 	}
-	return
+	return timeFunc(), nil
 }
 
 // SetDefaultTags set the DefaultTags
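The csv change also stops flattening the timestamp column through fmt.Sprintf before parsing: parseTimestamp now passes recordFields[timestampColumn] to internal.ParseTimestamp as-is. A minimal standalone illustration of why (not Telegraf code): once a value is a float64, %v renders it as a rounded, scientific-notation string, so the decimal-splitting path shown earlier would never see the original digits.

package main

import "fmt"

func main() {
	// If the column value has already been converted to float64, formatting
	// it back with %v loses the original text of the field:
	v := 1551129661.95456123352050781250
	fmt.Printf("%v\n", v) // rounded scientific notation, e.g. 1.5511296619545612e+09

	// Handing the raw interface{} value over instead lets a string column
	// arrive at the parser with every digit intact.
	var raw interface{} = "1551129661.95456123352050781250"
	fmt.Println(raw) // 1551129661.95456123352050781250
}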
@@ -5,6 +5,7 @@ import (
 	"testing"
 	"time"
 
+	"github.com/influxdata/telegraf"
 	"github.com/influxdata/telegraf/metric"
 	"github.com/influxdata/telegraf/testutil"
 	"github.com/stretchr/testify/require"
@@ -322,3 +323,30 @@ func TestParseStream(t *testing.T) {
 		DefaultTime(),
 	), metric)
 }
+
+func TestTimestampUnixFloatPrecision(t *testing.T) {
+	p := Parser{
+		MetricName:      "csv",
+		ColumnNames:     []string{"time", "value"},
+		TimestampColumn: "time",
+		TimestampFormat: "unix",
+		TimeFunc:        DefaultTime,
+	}
+	data := `1551129661.95456123352050781250,42`
+
+	expected := []telegraf.Metric{
+		testutil.MustMetric(
+			"csv",
+			map[string]string{},
+			map[string]interface{}{
+				"value": 42,
+				"time":  1551129661.954561233,
+			},
+			time.Unix(1551129661, 954561233),
+		),
+	}
+
+	metrics, err := p.Parse([]byte(data))
+	require.NoError(t, err)
+	testutil.RequireMetricsEqual(t, expected, metrics)
+}
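The new test feeds the parser an epoch whose fractional part carries more digits than the previous handling reported correctly, and expects the result truncated to whole nanoseconds. The sketch below walks through the arithmetic the test ends up exercising, assuming the column value reaches the timestamp parser as a float64 (that conversion happens outside the hunks shown here); it mirrors the new "case float64:" branch and lands on exactly the time.Unix(1551129661, 954561233) the test asserts.

package main

import (
	"fmt"
	"math"
	"time"
)

func main() {
	// The CSV value from the test, taken as a float64 for this sketch.
	ts := 1551129661.95456123352050781250

	// Arithmetic of the float64 branch: split into integer and fractional
	// parts, then truncate the fraction to whole nanoseconds.
	intPart, frac := math.Modf(ts)
	sec, nsec := int64(intPart), int64(frac*1e9)

	fmt.Println(sec, nsec)                  // 1551129661 954561233
	fmt.Println(time.Unix(sec, nsec).UTC()) // 2019-02-25 21:21:01.954561233 +0000 UTC
}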