package json

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"math"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/metric"
	"github.com/pkg/errors"
	"github.com/tidwall/gjson"
)

var (
	utf8BOM = []byte("\xef\xbb\xbf")
)
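
// JSONParser parses JSON into Telegraf metrics. MetricName names the produced
// metrics, TagKeys lists flattened keys to turn into tags, StringFields lists
// string fields to keep as fields, JSONNameKey optionally overrides the metric
// name, JSONQuery optionally narrows the input with a GJSON query, JSONTimeKey
// and JSONTimeFormat control timestamp extraction, and DefaultTags are added
// to every metric.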
type JSONParser struct {
	MetricName     string
	TagKeys        []string
	StringFields   []string
	JSONNameKey    string
	JSONQuery      string
	JSONTimeKey    string
	JSONTimeFormat string
	DefaultTags    map[string]string
}
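
// parseArray unmarshals buf as a JSON array of objects and appends one metric
// per array element to metrics.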
func (p *JSONParser) parseArray(buf []byte) ([]telegraf.Metric, error) {
	metrics := make([]telegraf.Metric, 0)

	var jsonOut []map[string]interface{}
	err := json.Unmarshal(buf, &jsonOut)
	if err != nil {
		err = fmt.Errorf("unable to parse buffer as JSON array: %s", err)
		return nil, err
	}
	for _, item := range jsonOut {
		metrics, err = p.parseObject(metrics, item)
		if err != nil {
			return nil, err
		}
	}
	return metrics, nil
}

// format = "unix": epoch is assumed to be in seconds and can come as number or string. Can have a decimal part.
// format = "unix_ms": epoch is assumed to be in milliseconds and can come as number or string. Cannot have a decimal part.
func parseUnixTimestamp(jsonValue interface{}, format string) (time.Time, error) {
	timeInt, timeFractional := int64(0), int64(0)
	timeEpochStr, ok := jsonValue.(string)
	var err error

	if !ok {
		timeEpochFloat, ok := jsonValue.(float64)
		if !ok {
			err := fmt.Errorf("time: %v could not be converted to string or float64", jsonValue)
			return time.Time{}, err
		}
		intPart, frac := math.Modf(timeEpochFloat)
		timeInt, timeFractional = int64(intPart), int64(frac*1e9)
	} else {
		splitted := regexp.MustCompile("[.,]").Split(timeEpochStr, 2)
		timeInt, err = strconv.ParseInt(splitted[0], 10, 64)
		if err != nil {
			return time.Time{}, err
		}

		if len(splitted) == 2 {
			if len(splitted[1]) > 9 {
				splitted[1] = splitted[1][:9] // truncate the decimal part to nanosecond precision
			}
			nanosecStr := splitted[1] + strings.Repeat("0", 9-len(splitted[1])) // pad with zeros on the right to obtain a valid number of nanoseconds

			timeFractional, err = strconv.ParseInt(nanosecStr, 10, 64)
			if err != nil {
				return time.Time{}, err
			}
		}
	}
	if strings.EqualFold(format, "unix") {
		return time.Unix(timeInt, timeFractional).UTC(), nil
	} else if strings.EqualFold(format, "unix_ms") {
		return time.Unix(timeInt/1000, (timeInt%1000)*1e6).UTC(), nil
	} else {
		return time.Time{}, errors.New("invalid unix format")
	}
}
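
// parseObject flattens a single JSON object into fields, resolves the metric
// name (from JSONNameKey, if set) and timestamp (from JSONTimeKey and
// JSONTimeFormat, if set), converts configured tag keys, and appends the
// resulting metric to metrics.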
func (p *JSONParser) parseObject(metrics []telegraf.Metric, jsonOut map[string]interface{}) ([]telegraf.Metric, error) {
	tags := make(map[string]string)
	for k, v := range p.DefaultTags {
		tags[k] = v
	}

	f := JSONFlattener{}
	err := f.FullFlattenJSON("", jsonOut, true, true)
	if err != nil {
		return nil, err
	}

	// if json_name_key is set and resolves to a string field, use it as the metric name
	if p.JSONNameKey != "" {
		switch field := f.Fields[p.JSONNameKey].(type) {
		case string:
			p.MetricName = field
		}
	}

	// if a time key is specified, parse it into nTime; otherwise use the current time
	nTime := time.Now().UTC()
	if p.JSONTimeKey != "" {
		if p.JSONTimeFormat == "" {
			err := fmt.Errorf("use of 'json_time_key' requires 'json_time_format'")
			return nil, err
		}

		if f.Fields[p.JSONTimeKey] == nil {
			err := fmt.Errorf("JSON time key could not be found")
			return nil, err
		}

		if strings.EqualFold(p.JSONTimeFormat, "unix") || strings.EqualFold(p.JSONTimeFormat, "unix_ms") {
			nTime, err = parseUnixTimestamp(f.Fields[p.JSONTimeKey], p.JSONTimeFormat)
			if err != nil {
				return nil, err
			}
		} else {
			timeStr, ok := f.Fields[p.JSONTimeKey].(string)
			if !ok {
				err := fmt.Errorf("time: %v could not be converted to string", f.Fields[p.JSONTimeKey])
				return nil, err
			}
			nTime, err = time.Parse(p.JSONTimeFormat, timeStr)
			if err != nil {
				return nil, err
			}
		}

		// if the year is 0, set to current year
		if nTime.Year() == 0 {
			nTime = nTime.AddDate(time.Now().Year(), 0, 0)
		}
	}

	tags, nFields := p.switchFieldToTag(tags, f.Fields)
	metric, err := metric.New(p.MetricName, tags, nFields, nTime)
	if err != nil {
		return nil, err
	}
	return append(metrics, metric), nil
}

// switchFieldToTag takes the flattened field map, moves any field whose name
// appears in TagKeys into the tag map (formatting bools and floats as strings),
// and then removes remaining string and bool values from the fields unless the
// field name is listed in StringFields.
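// For example (illustrative): with TagKeys = []string{"host"}, a flattened
// field host="server01" becomes the tag host=server01 and is removed from the
// fields, while any other string or bool field not listed in StringFields is
// dropped.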
func (p *JSONParser) switchFieldToTag(tags map[string]string, fields map[string]interface{}) (map[string]string, map[string]interface{}) {
	for _, name := range p.TagKeys {
		// switch any fields in tagkeys into tags
		if fields[name] == nil {
			continue
		}
		switch value := fields[name].(type) {
		case string:
			tags[name] = value
			delete(fields, name)
		case bool:
			tags[name] = strconv.FormatBool(value)
			delete(fields, name)
		case float64:
			tags[name] = strconv.FormatFloat(value, 'f', -1, 64)
			delete(fields, name)
		default:
			log.Printf("E! [parsers.json] Unrecognized type %T", value)
		}
	}

	// remove any additional string/bool values from fields
	for k := range fields {
		// check if field is in StringFields
		sField := false
		for _, v := range p.StringFields {
			if v == k {
				sField = true
			}
		}
		if sField {
			continue
		}

		switch fields[k].(type) {
		case string:
			delete(fields, k)
		case bool:
			delete(fields, k)
		}
	}
	return tags, fields
}
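
// Parse converts a buffer containing a single JSON object or an array of JSON
// objects into Telegraf metrics, optionally narrowing the input first with the
// GJSON query in JSONQuery. For example (illustrative): {"a": 5, "b": {"c": 6}}
// produces one metric with the fields a=5 and b_c=6.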
func (p *JSONParser) Parse(buf []byte) ([]telegraf.Metric, error) {
	if p.JSONQuery != "" {
		result := gjson.GetBytes(buf, p.JSONQuery)
		buf = []byte(result.Raw)
		if !result.IsArray() && !result.IsObject() {
			err := fmt.Errorf("query path must lead to a JSON object or array of objects, but led to: %v", result.Type)
			return nil, err
		}
	}

	buf = bytes.TrimSpace(buf)
	buf = bytes.TrimPrefix(buf, utf8BOM)
	if len(buf) == 0 {
		return make([]telegraf.Metric, 0), nil
	}

	if !isarray(buf) {
		metrics := make([]telegraf.Metric, 0)
		var jsonOut map[string]interface{}
		err := json.Unmarshal(buf, &jsonOut)
		if err != nil {
			err = fmt.Errorf("unable to parse buffer as JSON object: %s", err)
			return nil, err
		}
		return p.parseObject(metrics, jsonOut)
	}
	return p.parseArray(buf)
}
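
// ParseLine parses a single line of JSON and returns the first metric produced.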
func (p *JSONParser) ParseLine(line string) (telegraf.Metric, error) {
	metrics, err := p.Parse([]byte(line + "\n"))

	if err != nil {
		return nil, err
	}

	if len(metrics) < 1 {
		return nil, fmt.Errorf("cannot parse the line: %s, for data format: json", line)
	}

	return metrics[0], nil
}
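
// SetDefaultTags sets tags that are added to every metric produced by the parser.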
func (p *JSONParser) SetDefaultTags(tags map[string]string) {
	p.DefaultTags = tags
}
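
// JSONFlattener accumulates flattened JSON values into a single Fields map.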
type JSONFlattener struct {
	Fields map[string]interface{}
}

// FlattenJSON flattens nested maps/interfaces into a fields map (ignoring bools and strings)
func (f *JSONFlattener) FlattenJSON(
	fieldname string,
	v interface{}) error {
	if f.Fields == nil {
		f.Fields = make(map[string]interface{})
	}

	return f.FullFlattenJSON(fieldname, v, false, false)
}

// FullFlattenJSON flattens nested maps/interfaces into a fields map (including bools and strings)
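// For example (illustrative): flattening {"a": {"b": [1, 2]}} produces the
// fields a_b_0=1 and a_b_1=2.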
func (f *JSONFlattener) FullFlattenJSON(
	fieldname string,
	v interface{},
	convertString bool,
	convertBool bool,
) error {
	if f.Fields == nil {
		f.Fields = make(map[string]interface{})
	}
	fieldname = strings.Trim(fieldname, "_")
	switch t := v.(type) {
	case map[string]interface{}:
		for k, v := range t {
			err := f.FullFlattenJSON(fieldname+"_"+k+"_", v, convertString, convertBool)
			if err != nil {
				return err
			}
		}
	case []interface{}:
		for i, v := range t {
			k := strconv.Itoa(i)
			err := f.FullFlattenJSON(fieldname+"_"+k+"_", v, convertString, convertBool)
			if err != nil {
				return err
			}
		}
	case float64:
		f.Fields[fieldname] = t
	case string:
		if convertString {
			f.Fields[fieldname] = v.(string)
		} else {
			return nil
		}
	case bool:
		if convertBool {
			f.Fields[fieldname] = v.(bool)
		} else {
			return nil
		}
	case nil:
		return nil
	default:
		return fmt.Errorf("JSON Flattener: got unexpected type %T with value %v (%s)",
			t, t, fieldname)
	}
	return nil
}
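
// isarray reports whether the buffer looks like a JSON array of objects, i.e.
// whether a '[' appears before the first '{'.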
func isarray(buf []byte) bool {
	ia := bytes.IndexByte(buf, '[')
	ib := bytes.IndexByte(buf, '{')
	return ia > -1 && ia < ib
}