add new consumer related information to readme.md
parent b6926c36e8
commit 97fcfc0e0a
@ -106,6 +106,8 @@ The JSON data format supports specifying "tag keys". If specified, keys
will be searched for in the root-level of the JSON blob. If the key(s) exist,
they will be applied as tags to the Telegraf metrics.

The JSON data format can also select the metric timestamp with "timestamp_selector" and then parse it using "timestamp_formatter"; this can be useful when dealing with metrics that were not generated locally.

For example, if you had this configuration:

```toml
@ -127,6 +129,11 @@ For example, if you had this configuration:
    "my_tag_1",
    "my_tag_2"
  ]

  timestamp_selector = "@timestamp"
  ## for more information about timestamp formatter, please refer to:
  ## https://golang.org/src/time/format.go
  timestamp_formatter = "2006-01-02T15:04:05Z07:00"
```

with this JSON output from a command:
@ -137,14 +144,15 @@ with this JSON output from a command:
  "b": {
    "c": 6
  },
- "my_tag_1": "foo"
+ "my_tag_1": "foo",
+ "@timestamp": "2016-07-27T16:46:00.554Z"
}
```

- Your Telegraf metrics would get tagged with "my_tag_1"
+ Your Telegraf metrics would get tagged with "my_tag_1" and use the parsed timestamp

```
- exec_mycollector,my_tag_1=foo a=5,b_c=6
+ exec_mycollector,my_tag_1=foo a=5,b_c=6 1469637960554000000
```
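To make the formatter concrete: "timestamp_formatter" uses Go's reference-time layout (hence the link to time/format.go above), and the parsed value becomes the metric's Unix-nanosecond timestamp. Below is a minimal standalone Go sketch of that conversion, independent of Telegraf's actual parser code:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// The timestamp_formatter value is a Go reference-time layout.
	layout := "2006-01-02T15:04:05Z07:00"

	// The value picked out of the JSON blob by timestamp_selector ("@timestamp").
	ts, err := time.Parse(layout, "2016-07-27T16:46:00.554Z")
	if err != nil {
		panic(err)
	}

	// Telegraf line protocol timestamps are Unix nanoseconds.
	fmt.Println(ts.UnixNano()) // 1469637960554000000
}
```
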
# Value:
@ -6,11 +6,15 @@ line protocol. [Consumer Group](http://godoc.org/github.com/wvanbergen/kafka/con
is used to talk to the Kafka cluster so multiple instances of telegraf can read
from the same topic in parallel.

- ## Configuration
+ Now supports the Kafka new consumer (version 0.9+) with TLS.
+
+ ## Configuration[0.8]

```toml
# Read metrics from Kafka topic(s)
[[inputs.kafka_consumer]]
  ## is new consumer?
  new_consumer = false
  ## topic(s) to consume
  topics = ["telegraf"]
  ## an array of Zookeeper connection strings
@ -30,6 +34,41 @@ from the same topic in parallel.
  data_format = "influx"
```

## Configuration[0.9+]

```toml
# Read metrics from Kafka topic(s)
[[inputs.kafka_consumer]]
  ## is new consumer?
  new_consumer = true
  ## topic(s) to consume
  topics = ["telegraf"]
  ## an array of kafka 0.9+ brokers
  broker_list = ["localhost:9092"]
  ## the name of the consumer group
  consumer_group = "telegraf_kafka_consumer_group"
  ## Offset (must be either "oldest" or "newest")
  offset = "oldest"

  ## Optional SSL Config
  ssl_ca = "/etc/telegraf/ca.pem"
  ssl_cert = "/etc/telegraf/cert.pem"
  ssl_key = "/etc/telegraf/cert.key"
  ## Use SSL but skip chain & host verification
  insecure_skip_verify = false

  ## Data format to consume.
  ## Each data format has its own unique set of configuration options, read
  ## more about them here:
  ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
  data_format = "influx"
```
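For orientation, here is a rough sketch of what the ssl_* options above typically amount to on the Go side: loading the CA, certificate, and key into a tls.Config. The helper name and wiring are illustrative assumptions, not the plugin's actual implementation.

```go
package main

import (
	"crypto/tls"
	"crypto/x509"
	"io/ioutil"
	"log"
)

// newTLSConfig is a hypothetical helper mirroring the ssl_ca, ssl_cert,
// ssl_key and insecure_skip_verify options shown in the configuration above.
func newTLSConfig(ca, cert, key string, insecureSkipVerify bool) (*tls.Config, error) {
	// Client certificate/key pair used to authenticate to the brokers.
	pair, err := tls.LoadX509KeyPair(cert, key)
	if err != nil {
		return nil, err
	}

	// CA bundle used to verify the brokers' certificates.
	caPEM, err := ioutil.ReadFile(ca)
	if err != nil {
		return nil, err
	}
	pool := x509.NewCertPool()
	pool.AppendCertsFromPEM(caPEM)

	return &tls.Config{
		Certificates:       []tls.Certificate{pair},
		RootCAs:            pool,
		InsecureSkipVerify: insecureSkipVerify,
	}, nil
}

func main() {
	cfg, err := newTLSConfig("/etc/telegraf/ca.pem", "/etc/telegraf/cert.pem", "/etc/telegraf/cert.key", false)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("loaded %d client certificate(s)", len(cfg.Certificates))
}
```
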
## Testing
Running integration tests requires running Zookeeper & Kafka. See Makefile
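As a generic illustration (not the plugin's actual test code), Go integration tests of this kind are commonly guarded so they only run when the backing services from the Makefile are available, for example via `testing.Short()`:

```go
package kafka_consumer_test

import "testing"

func TestKafkaConsumerIntegration(t *testing.T) {
	// Skip unless Zookeeper & Kafka (e.g. started via the Makefile) are running.
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}
	// ... connect to the broker, produce a test message, and assert it is consumed ...
}
```
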
@ -72,7 +72,7 @@ type Kafka struct {
var sampleConfig = `
  ## is new consumer?
- new_consumer = true
+ new_consumer = false
  ## topic(s) to consume
  topics = ["telegraf"]
  ## an array of Zookeeper connection strings