Merge branch 'plugin/reader' of github.com:influxdata/telegraf into plugin/reader

Max U committed on 2018-06-26 14:19:38 -07:00
commit 3deeea7a8a
6 changed files with 106 additions and 1670 deletions

Makefile (101 deletions)

@@ -1,101 +0,0 @@
PREFIX := /usr/local
VERSION := $(shell git describe --exact-match --tags 2>/dev/null)
BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
COMMIT := $(shell git rev-parse --short HEAD)
GOFILES ?= $(shell git ls-files '*.go')
GOFMT ?= $(shell gofmt -l $(filter-out plugins/parsers/influx/machine.go, $(GOFILES)))
BUILDFLAGS ?=

ifdef GOBIN
PATH := $(GOBIN):$(PATH)
else
PATH := $(subst :,/bin:,$(GOPATH))/bin:$(PATH)
endif

LDFLAGS := $(LDFLAGS) -X main.commit=$(COMMIT) -X main.branch=$(BRANCH)
ifdef VERSION
LDFLAGS += -X main.version=$(VERSION)
endif

all:
	$(MAKE) deps
	$(MAKE) telegraf

deps:
	go get -u github.com/golang/lint/golint
	go get -u github.com/golang/dep/cmd/dep
	dep ensure

telegraf:
	go build -ldflags "$(LDFLAGS)" ./cmd/telegraf

go-install:
	go install -ldflags "-w -s $(LDFLAGS)" ./cmd/telegraf

install: telegraf
	mkdir -p $(DESTDIR)$(PREFIX)/bin/
	cp telegraf $(DESTDIR)$(PREFIX)/bin/

test:
	go test -short ./...

fmt:
	@gofmt -w $(filter-out plugins/parsers/influx/machine.go, $(GOFILES))

fmtcheck:
	@echo '[INFO] running gofmt to identify incorrectly formatted code...'
	@if [ ! -z "$(GOFMT)" ]; then \
		echo "[ERROR] gofmt has found errors in the following files:" ; \
		echo "$(GOFMT)" ; \
		echo "" ;\
		echo "Run make fmt to fix them." ; \
		exit 1 ;\
	fi
	@echo '[INFO] done.'

test-windows:
	go test -short ./plugins/inputs/ping/...
	go test -short ./plugins/inputs/win_perf_counters/...
	go test -short ./plugins/inputs/win_services/...
	go test -short ./plugins/inputs/procstat/...
	go test -short ./plugins/inputs/ntpq/...

# vet runs the Go source code static analysis tool `vet` to find
# any common errors.
vet:
	@echo 'go vet $$(go list ./... | grep -v ./plugins/parsers/influx)'
	@go vet $$(go list ./... | grep -v ./plugins/parsers/influx) ; if [ $$? -ne 0 ]; then \
		echo ""; \
		echo "go vet has found suspicious constructs. Please remediate any reported errors"; \
		echo "to fix them before submitting code for review."; \
		exit 1; \
	fi

test-ci: fmtcheck vet
	go test -short ./...

test-all: fmtcheck vet
	go test ./...

package:
	./scripts/build.py --package --platform=all --arch=all

clean:
	rm -f telegraf
	rm -f telegraf.exe

docker-image:
	./scripts/build.py --package --platform=linux --arch=amd64
	cp build/telegraf*$(COMMIT)*.deb .
	docker build -f scripts/dev.docker --build-arg "package=telegraf*$(COMMIT)*.deb" -t "telegraf-dev:$(COMMIT)" .

plugins/parsers/influx/machine.go: plugins/parsers/influx/machine.go.rl
	ragel -Z -G2 $^ -o $@

<<<<<<< HEAD
.PHONY: deps telegraf install test test-windows lint vet test-all package clean docker-image fmtcheck uint64 static
=======
.PHONY: deps telegraf install test test-windows lint vet test-all package clean docker-image fmtcheck uint64
>>>>>>> 23523ffd10bac33b0e40bad98bee4213049109ff
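The deleted Makefile injects version metadata at link time through `-X main.commit=$(COMMIT)`, `-X main.branch=$(BRANCH)`, and (when tagged) `-X main.version=$(VERSION)`. A minimal sketch of how such link-time flags surface inside a Go binary; the variable names mirror the flags above, but treat the exact wiring in telegraf's `cmd/telegraf` as an assumption:

```go
package main

import "fmt"

// Overridden at link time, e.g.:
//   go build -ldflags "-X main.version=1.8.0 -X main.branch=master -X main.commit=abc1234" .
// Any flag left unset keeps the default below.
var (
	version = "unknown"
	branch  = "unknown"
	commit  = "unknown"
)

func main() {
	fmt.Printf("Telegraf %s (branch %s, commit %s)\n", version, branch, commit)
}
```

Only package-level string variables can be set with `-X`, which is why the Makefile resolves COMMIT, BRANCH, and VERSION to plain strings via git before handing them to the linker.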

docs/DATA_FORMATS_INPUT.md

@@ -9,10 +9,8 @@ Telegraf is able to parse the following input data formats into metrics:
1. [Nagios](https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md#nagios) (exec input only)
1. [Collectd](https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md#collectd)
1. [Dropwizard](https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md#dropwizard)
<<<<<<< HEAD
1. [Grok](https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md#grok)
=======
>>>>>>> 23523ffd10bac33b0e40bad98bee4213049109ff

Telegraf metrics, like InfluxDB
[points](https://docs.influxdata.com/influxdb/v0.10/write_protocols/line/),
@@ -655,7 +653,6 @@ For more information about the dropwizard json format see
# [inputs.exec.dropwizard_tag_paths]
#   tag1 = "tags.tag1"
#   tag2 = "tags.tag2"
<<<<<<< HEAD
```

#### Grok
@@ -689,8 +686,4 @@ Parse logstash-style "grok" patterns:
## 2. "Canada/Eastern" -- Unix TZ values like those found in https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
## 3. UTC -- or blank/unspecified, will return timestamp in UTC
timezone = "Canada/Eastern"
```
=======
```
>>>>>>> 23523ffd10bac33b0e40bad98bee4213049109ff
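The grok documentation kept by this merge lists three ways to interpret timestamps that carry no offset: `Local`, a tz database name such as `Canada/Eastern`, or UTC when blank. A short sketch of how those three options map onto Go's standard time package; the layout and sample timestamp are illustrative only, and the parser's actual handling on this branch may differ:

```go
package main

import (
	"fmt"
	"log"
	"time"
)

func main() {
	// "Local" -- interpret timestamps in the machine's local timezone.
	local := time.Local

	// "Canada/Eastern" -- any tz database name can be loaded by LoadLocation.
	eastern, err := time.LoadLocation("Canada/Eastern")
	if err != nil {
		log.Fatal(err)
	}

	// "" or "UTC" -- fall back to UTC.
	utc := time.UTC

	// A timestamp with no offset, as it might appear in a log line
	// (day/month layout chosen arbitrarily for this sketch).
	const layout = "02/01/2006 15:04:05"
	stamp := "04/06/2016 12:41:45"

	for _, loc := range []*time.Location{local, eastern, utc} {
		t, err := time.ParseInLocation(layout, stamp, loc)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(loc, "->", t.UTC())
	}
}
```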

File diff suppressed because it is too large.


@@ -1,4 +1,3 @@
[[inputs.reader]]
files = ["/var/log/test.log"]
data_format = "json"

plugins/parsers/registry.go

@@ -88,7 +88,6 @@ type Config struct {
	// an optional map containing tag names as keys and json paths to retrieve the tag values from as values
	// used if TagsPath is empty or doesn't return any tags
	DropwizardTagPathsMap map[string]string
<<<<<<< HEAD

	//grok patterns
	Patterns []string
@@ -96,8 +95,6 @@ type Config struct {
	CustomPatterns string
	CustomPatternFiles []string
	TimeZone string
=======
>>>>>>> 23523ffd10bac33b0e40bad98bee4213049109ff
}

// NewParser returns a Parser interface based on the given config.
@@ -131,7 +128,7 @@ func NewParser(config *Config) (Parser, error) {
			config.DefaultTags,
			config.Separator,
			config.Templates)
<<<<<<< HEAD
	case "grok":
		parser, err = NewGrokParser(
			config.MetricName,
@@ -140,8 +137,6 @@ func NewParser(config *Config) (Parser, error) {
			config.CustomPatterns,
			config.CustomPatternFiles,
			config.TimeZone)
=======
>>>>>>> 23523ffd10bac33b0e40bad98bee4213049109ff
	default:
		err = fmt.Errorf("Invalid data format: %s", config.DataFormat)
	}
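With the `case "grok":` branch kept by this merge, a grok parser can be requested through the same `NewParser` factory as the other data formats. A minimal sketch of what that might look like from calling code, using the `Config` fields shown in the diff; the pattern name and log line are only illustrative, and `COMMON_LOG_FORMAT` is assumed to be one of the built-in patterns carried over from the logparser plugin:

```go
package main

import (
	"fmt"
	"log"

	"github.com/influxdata/telegraf/plugins/parsers"
)

func main() {
	// Request a grok parser through the shared factory, using the fields
	// added to parsers.Config on this branch.
	parser, err := parsers.NewParser(&parsers.Config{
		DataFormat: "grok",
		MetricName: "grok_reader",
		Patterns:   []string{"%{COMMON_LOG_FORMAT}"}, // assumed built-in pattern
	})
	if err != nil {
		log.Fatal(err)
	}

	line := `127.0.0.1 - - [10/Oct/2000:13:55:36 -0700] "GET /index.html HTTP/1.0" 200 2326`
	metrics, err := parser.Parse([]byte(line))
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range metrics {
		fmt.Println(m.Name(), m.Fields(), m.Tags())
	}
}
```

The `[[inputs.reader]]` configuration in telegraf.conf below exercises this same path when `data_format = "grok"` is set.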

telegraf.conf (new file, 104 additions)

@@ -0,0 +1,104 @@
# Global tags can be specified here in key="value" format.
[global_tags]
# dc = "us-east-1" # will tag all metrics with dc=us-east-1
# rack = "1a"
## Environment variables can be used as tags, and throughout the config file
# user = "$USER"
# Configuration for telegraf agent
[agent]
## Default data collection interval for all inputs
interval = "10s"
## Rounds collection interval to 'interval'
## ie, if interval="10s" then always collect on :00, :10, :20, etc.
round_interval = true
## Telegraf will send metrics to outputs in batches of at most
## metric_batch_size metrics.
## This controls the size of writes that Telegraf sends to output plugins.
metric_batch_size = 1000
## For failed writes, telegraf will cache metric_buffer_limit metrics for each
## output, and will flush this buffer on a successful write. Oldest metrics
## are dropped first when this buffer fills.
## This buffer only fills when writes fail to output plugin(s).
metric_buffer_limit = 10000
## Collection jitter is used to jitter the collection by a random amount.
## Each plugin will sleep for a random time within jitter before collecting.
## This can be used to avoid many plugins querying things like sysfs at the
## same time, which can have a measurable effect on the system.
collection_jitter = "0s"
## Default flushing interval for all outputs. You shouldn't set this below
## interval. Maximum flush_interval will be flush_interval + flush_jitter
flush_interval = "10s"
## Jitter the flush interval by a random amount. This is primarily to avoid
## large write spikes for users running a large number of telegraf instances.
## ie, a jitter of 5s and interval 10s means flushes will happen every 10-15s
flush_jitter = "0s"
## By default or when set to "0s", precision will be set to the same
## timestamp order as the collection interval, with the maximum being 1s.
## ie, when interval = "10s", precision will be "1s"
## when interval = "250ms", precision will be "1ms"
## Precision will NOT be used for service inputs. It is up to each individual
## service input to set the timestamp at the appropriate precision.
## Valid time units are "ns", "us" (or "µs"), "ms", "s".
precision = ""
## Logging configuration:
## Run telegraf with debug log messages.
debug = false
## Run telegraf in quiet mode (error log messages only).
quiet = false
## Specify the log file name. The empty string means to log to stderr.
logfile = ""
## Override default hostname, if empty use os.Hostname()
hostname = ""
## If set to true, do not set the "host" tag in the telegraf agent.
omit_hostname = false
# # reload and gather from file[s] on telegraf's interval
[[inputs.reader]]
# ## These accept standard unix glob matching rules, but with the addition of
# ## ** as a "super asterisk". ie:
# ## /var/log/**.log -> recursively find all .log files in /var/log
# ## /var/log/*/*.log -> find all .log files with a parent dir in /var/log
# ## /var/log/apache.log -> only tail the apache log file
files = ["/Users/maxu/go/src/github.com/influxdata/telegraf/plugins/inputs/logparser/grok/testdata/**.log"]
#
# ## The dataformat to be read from files
# ## Each data format has its own unique set of configuration options, read
# ## more about them here:
# ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
data_format = "grok"
#
patterns = ["%{TEST_LOG_B}","%{TEST_LOG_A}"]
#
# ## Name of the output measurement.
name_override = "grok_reader"
#
# ## Full path(s) to custom pattern files.
custom_pattern_files = ["/Users/maxu/go/src/github.com/influxdata/telegraf/plugins/inputs/logparser/grok/testdata/test-patterns"]
#
# ## Custom patterns can also be defined here. Put one pattern per line.
# custom_patterns = '''
# '''
#
# ## Timezone allows you to provide an override for timestamps that
# ## don't already include an offset
# ## e.g. 04/06/2016 12:41:45 data one two 5.43µs
# ##
# ## Default: "" which renders UTC
# ## Options are as follows:
# ## 1. Local -- interpret based on machine localtime
# ## 2. "Canada/Eastern" -- Unix TZ values like those found in https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
# ## 3. UTC -- or blank/unspecified, will return timestamp in UTC
# timezone = "Canada/Eastern"
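The `files` comments in the `[[inputs.reader]]` section above describe standard glob matching plus `**` as a "super asterisk" that recurses into subdirectories. A small sketch of those semantics using github.com/gobwas/glob, which telegraf's globpath helper builds on; the exact plugin wiring is an assumption here:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// With '/' as a separator, "*" stays within one directory level,
	// while "**" crosses directory boundaries.
	g := glob.MustCompile("/var/log/**.log", '/')

	for _, path := range []string{
		"/var/log/apache.log",       // matches: direct child of /var/log
		"/var/log/nginx/access.log", // matches: ** recurses into subdirectories
		"/var/log/apache.log.gz",    // no match: extension is .gz, not .log
	} {
		fmt.Printf("%-30s %v\n", path, g.Match(path))
	}
}
```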