add "field_tags" to json parser config

Max U 2018-06-27 15:38:02 -07:00
parent 23523ffd10
commit 420dafd591
6 changed files with 52 additions and 16 deletions

View File

@@ -104,9 +104,10 @@ but can be overridden using the `name_override` config option.
#### JSON Configuration:
-The JSON data format supports specifying "tag keys". If specified, keys
-will be searched for in the root-level of the JSON blob. If the key(s) exist,
-they will be applied as tags to the Telegraf metrics.
+The JSON data format supports specifying "tag keys" and "field keys". If specified, the keys
+will be searched for in the root level and any nested lists of the JSON blob. If the key(s) exist,
+they will be applied as tags or fields to the Telegraf metrics. If "field_keys" is not specified,
+all int and float values will be set as fields by default.
For example, if you had this configuration:
@@ -173,6 +174,7 @@ For example, if the following configuration:
"my_tag_1",
"my_tag_2"
]
+field_keys = ["b_c"]
```
with this JSON output from a command:
@@ -198,11 +200,11 @@ with this JSON output from a command:
]
```
-Your Telegraf metrics would get tagged with "my_tag_1" and "my_tag_2"
+Your Telegraf metrics would get tagged with "my_tag_1" and "my_tag_2", and "b_c" would be set as a field
```
-exec_mycollector,my_tag_1=foo,my_tag_2=baz a=5,b_c=6
-exec_mycollector,my_tag_1=bar,my_tag_2=baz a=7,b_c=8
+exec_mycollector,my_tag_1=foo,my_tag_2=baz b_c=6
+exec_mycollector,my_tag_1=bar,my_tag_2=baz b_c=8
```
# Value:

View File

@@ -1261,6 +1261,18 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
}
}
if node, ok := tbl.Fields["field_keys"]; ok {
if kv, ok := node.(*ast.KeyValue); ok {
if ary, ok := kv.Value.(*ast.Array); ok {
for _, elem := range ary.Value {
if str, ok := elem.(*ast.String); ok {
c.FieldKeys = append(c.FieldKeys, str.Value)
}
}
}
}
}
if node, ok := tbl.Fields["data_type"]; ok {
if kv, ok := node.(*ast.KeyValue); ok {
if str, ok := kv.Value.(*ast.String); ok {
@@ -1344,6 +1356,7 @@ func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) {
delete(tbl.Fields, "separator")
delete(tbl.Fields, "templates")
delete(tbl.Fields, "tag_keys")
delete(tbl.Fields, "field_keys")
delete(tbl.Fields, "data_type")
delete(tbl.Fields, "collectd_auth_file")
delete(tbl.Fields, "collectd_security_level")

View File

@@ -143,7 +143,7 @@ func TestConfig_LoadDirectory(t *testing.T) {
"Testdata did not produce correct memcached metadata.")
ex := inputs.Inputs["exec"]().(*exec.Exec)
-p, err := parsers.NewJSONParser("exec", nil, nil)
+p, err := parsers.NewJSONParser("exec", nil, nil, nil)
assert.NoError(t, err)
ex.SetParser(p)
ex.Command = "/usr/bin/myothercollector --foo=bar"

View File

@@ -20,6 +20,7 @@ var (
type JSONParser struct {
MetricName string
TagKeys []string
+FieldKeys []string
DefaultTags map[string]string
}
@@ -86,6 +87,17 @@ func (p *JSONParser) switchFieldToTag(tags map[string]string, fields map[string]
}
}
+// if field_keys is specified, only those values should be reported as fields
+if len(p.FieldKeys) > 0 {
+nFields := make(map[string]interface{})
+for _, name := range p.FieldKeys {
+if fields[name] != nil {
+nFields[name] = fields[name]
+}
+}
+return tags, nFields
+}
//remove any additional string/bool values from fields
for k := range fields {
switch fields[k].(type) {
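A minimal sketch of the new behavior from a caller's point of view, assuming the `plugins/parsers/json` import path and the `Tags()`/`Fields()` accessors on `telegraf.Metric`; the payload reuses the README example above:

```go
package main

import (
	"fmt"
	"log"

	"github.com/influxdata/telegraf/plugins/parsers/json"
)

func main() {
	parser := json.JSONParser{
		MetricName: "exec_mycollector",
		TagKeys:    []string{"my_tag_1"},
		// With FieldKeys set, only "b_c" is kept as a field; "a" is dropped.
		FieldKeys: []string{"b_c"},
	}

	// Same shape as the README example: nested "b.c" flattens to "b_c".
	metrics, err := parser.Parse([]byte(`{"a": 5, "b": {"c": 6}, "my_tag_1": "foo"}`))
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range metrics {
		fmt.Println(m.Tags(), m.Fields()) // map[my_tag_1:foo] map[b_c:6]
	}
}
```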

View File

@@ -454,13 +454,18 @@ func TestJSONParseNestedArray(t *testing.T) {
"avg_find_time": 4,
"tester": "work",
"tester2": "don't want this",
"tester3": 7.93
"tester3": {
"hello":"sup",
"fun":"money",
"break":9
}
}
}`
parser := JSONParser{
MetricName: "json_test",
TagKeys: []string{"total_devices", "total_threads", "shares_tester", "shares_tester3"},
TagKeys: []string{"total_devices", "total_threads", "shares_tester3_fun"},
FieldKeys: []string{"shares_tester", "shares_tester3_break"},
}
metrics, err := parser.Parse([]byte(testString))

View File

@@ -56,6 +56,8 @@ type Config struct {
// TagKeys only apply to JSON data
TagKeys []string
+// FieldKeys only apply to JSON
+FieldKeys []string
// MetricName applies to JSON & value. This will be the name of the measurement.
MetricName string
@@ -96,7 +98,7 @@ func NewParser(config *Config) (Parser, error) {
switch config.DataFormat {
case "json":
parser, err = NewJSONParser(config.MetricName,
-config.TagKeys, config.DefaultTags)
+config.TagKeys, config.FieldKeys, config.DefaultTags)
case "value":
parser, err = NewValueParser(config.MetricName,
config.DataType, config.DefaultTags)
@@ -129,11 +131,13 @@ func NewParser(config *Config) (Parser, error) {
func NewJSONParser(
metricName string,
tagKeys []string,
+fieldKeys []string,
defaultTags map[string]string,
) (Parser, error) {
parser := &json.JSONParser{
MetricName: metricName,
TagKeys: tagKeys,
+FieldKeys: fieldKeys,
DefaultTags: defaultTags,
}
return parser, nil
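A sketch of how callers pick up the new parameter, mirroring the updated call in config_test.go above; the key names are illustrative:

```go
package main

import (
	"log"

	"github.com/influxdata/telegraf/plugins/parsers"
)

func main() {
	// Positional form, as in the updated test: metric name, tag keys,
	// field keys, default tags.
	direct, err := parsers.NewJSONParser("exec", []string{"my_tag_1"}, []string{"b_c"}, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Or through the generic Config path, which now forwards FieldKeys as well.
	fromConfig, err := parsers.NewParser(&parsers.Config{
		DataFormat: "json",
		MetricName: "exec",
		TagKeys:    []string{"my_tag_1"},
		FieldKeys:  []string{"b_c"},
	})
	if err != nil {
		log.Fatal(err)
	}

	_, _ = direct, fromConfig
}
```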