Improve docs to clarify common issues (#5054)
This commit is contained in:
parent
74d8523db6
commit
83bc3d1277
|
@ -14,7 +14,7 @@
|
|||
|
||||
[[inputs.jolokia2_agent.metric]]
|
||||
name = "java_garbage_collector"
|
||||
mbean = "java.lang:name=G1*,type=GarbageCollector"
|
||||
mbean = "java.lang:name=*,type=GarbageCollector"
|
||||
paths = ["CollectionTime", "CollectionCount"]
|
||||
tag_keys = ["name"]
|
||||
|
||||
|
|
|
@ -76,6 +76,7 @@ line and the `semantic_name` is used to name the field or tag. The extension
|
|||
other special handling.
|
||||
|
||||
By default all named captures are converted into string fields.
|
||||
If a pattern does not have a semantic name it will not be captured.
|
||||
Timestamp modifiers can be used to convert captures to the timestamp of the
|
||||
parsed metric. If no timestamp is parsed the metric will be created using the
|
||||
current time.
|
||||
|
|
|
@ -1,73 +0,0 @@
|
|||
# Captures are a slightly modified version of logstash "grok" patterns, with
|
||||
# the format %{<capture syntax>[:<semantic name>][:<modifier>]}
|
||||
# By default all named captures are converted into string fields.
|
||||
# Modifiers can be used to convert captures to other types or tags.
|
||||
# Timestamp modifiers can be used to convert captures to the timestamp of the
|
||||
# parsed metric.
|
||||
|
||||
# View logstash grok pattern docs here:
|
||||
# https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html
|
||||
# All default logstash patterns are supported, these can be viewed here:
|
||||
# https://github.com/logstash-plugins/logstash-patterns-core/blob/master/patterns/grok-patterns
|
||||
|
||||
# Available modifiers:
|
||||
# string (default if nothing is specified)
|
||||
# int
|
||||
# float
|
||||
# duration (ie, 5.23ms gets converted to int nanoseconds)
|
||||
# tag (converts the field into a tag)
|
||||
# drop (drops the field completely)
|
||||
# Timestamp modifiers:
|
||||
# ts-ansic ("Mon Jan _2 15:04:05 2006")
|
||||
# ts-unix ("Mon Jan _2 15:04:05 MST 2006")
|
||||
# ts-ruby ("Mon Jan 02 15:04:05 -0700 2006")
|
||||
# ts-rfc822 ("02 Jan 06 15:04 MST")
|
||||
# ts-rfc822z ("02 Jan 06 15:04 -0700")
|
||||
# ts-rfc850 ("Monday, 02-Jan-06 15:04:05 MST")
|
||||
# ts-rfc1123 ("Mon, 02 Jan 2006 15:04:05 MST")
|
||||
# ts-rfc1123z ("Mon, 02 Jan 2006 15:04:05 -0700")
|
||||
# ts-rfc3339 ("2006-01-02T15:04:05Z07:00")
|
||||
# ts-rfc3339nano ("2006-01-02T15:04:05.999999999Z07:00")
|
||||
# ts-httpd ("02/Jan/2006:15:04:05 -0700")
|
||||
# ts-epoch (seconds since unix epoch)
|
||||
# ts-epochnano (nanoseconds since unix epoch)
|
||||
# ts-"CUSTOM"
|
||||
# CUSTOM time layouts must be within quotes and be the representation of the
|
||||
# "reference time", which is Mon Jan 2 15:04:05 -0700 MST 2006
|
||||
# See https://golang.org/pkg/time/#Parse for more details.
|
||||
|
||||
# Example log file pattern, example log looks like this:
|
||||
# [04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs
|
||||
# Breakdown of the DURATION pattern below:
|
||||
# NUMBER is a builtin logstash grok pattern matching float & int numbers.
|
||||
# [nuµm]? is a regex specifying 0 or 1 of the characters within brackets.
|
||||
# s is also regex, this pattern must end in "s".
|
||||
# so DURATION will match something like '5.324ms' or '6.1µs' or '10s'
|
||||
DURATION %{NUMBER}[nuµm]?s
|
||||
RESPONSE_CODE %{NUMBER:response_code:tag}
|
||||
RESPONSE_TIME %{DURATION:response_time_ns:duration}
|
||||
EXAMPLE_LOG \[%{HTTPDATE:ts:ts-httpd}\] %{NUMBER:myfloat:float} %{RESPONSE_CODE} %{IPORHOST:clientip} %{RESPONSE_TIME}
|
||||
|
||||
# Wider-ranging username matching vs. logstash built-in %{USER}
|
||||
NGUSERNAME [a-zA-Z0-9\.\@\-\+_%]+
|
||||
NGUSER %{NGUSERNAME}
|
||||
# Wider-ranging client IP matching
|
||||
CLIENT (?:%{IPORHOST}|%{HOSTPORT}|::1)
|
||||
|
||||
##
|
||||
## COMMON LOG PATTERNS
|
||||
##
|
||||
|
||||
# apache & nginx logs, this is also known as the "common log format"
|
||||
# see https://en.wikipedia.org/wiki/Common_Log_Format
|
||||
COMMON_LOG_FORMAT %{CLIENT:client_ip} %{NOTSPACE:ident} %{NOTSPACE:auth} \[%{HTTPDATE:ts:ts-httpd}\] "(?:%{WORD:verb:tag} %{NOTSPACE:request}(?: HTTP/%{NUMBER:http_version:float})?|%{DATA})" %{NUMBER:resp_code:tag} (?:%{NUMBER:resp_bytes:int}|-)
|
||||
|
||||
# Combined log format is the same as the common log format but with the addition
|
||||
# of two quoted strings at the end for "referrer" and "agent"
|
||||
# See Examples at http://httpd.apache.org/docs/current/mod/mod_log_config.html
|
||||
COMBINED_LOG_FORMAT %{COMMON_LOG_FORMAT} %{QS:referrer} %{QS:agent}
|
||||
|
||||
# HTTPD log formats
|
||||
HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:loglevel:tag}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:errormsg}
|
||||
HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{WORD:module}:%{LOGLEVEL:loglevel:tag}\] \[pid %{POSINT:pid:int}:tid %{NUMBER:tid:int}\]( \(%{POSINT:proxy_errorcode:int}\)%{DATA:proxy_errormessage}:)?( \[client %{IPORHOST:client}:%{POSINT:clientport}\])? %{DATA:errorcode}: %{GREEDYDATA:message}
|
||||
HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG}
|
|
@ -15,7 +15,7 @@ import (
|
|||
func TestStartNoParsers(t *testing.T) {
|
||||
logparser := &LogParserPlugin{
|
||||
FromBeginning: true,
|
||||
Files: []string{"grok/testdata/*.log"},
|
||||
Files: []string{"testdata/*.log"},
|
||||
}
|
||||
|
||||
acc := testutil.Accumulator{}
|
||||
|
@ -27,10 +27,10 @@ func TestGrokParseLogFilesNonExistPattern(t *testing.T) {
|
|||
|
||||
logparser := &LogParserPlugin{
|
||||
FromBeginning: true,
|
||||
Files: []string{thisdir + "grok/testdata/*.log"},
|
||||
Files: []string{thisdir + "testdata/*.log"},
|
||||
GrokConfig: GrokConfig{
|
||||
Patterns: []string{"%{FOOBAR}"},
|
||||
CustomPatternFiles: []string{thisdir + "grok/testdata/test-patterns"},
|
||||
CustomPatternFiles: []string{thisdir + "testdata/test-patterns"},
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -46,10 +46,10 @@ func TestGrokParseLogFiles(t *testing.T) {
|
|||
GrokConfig: GrokConfig{
|
||||
MeasurementName: "logparser_grok",
|
||||
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
|
||||
CustomPatternFiles: []string{thisdir + "grok/testdata/test-patterns"},
|
||||
CustomPatternFiles: []string{thisdir + "testdata/test-patterns"},
|
||||
},
|
||||
FromBeginning: true,
|
||||
Files: []string{thisdir + "grok/testdata/*.log"},
|
||||
Files: []string{thisdir + "testdata/*.log"},
|
||||
}
|
||||
|
||||
acc := testutil.Accumulator{}
|
||||
|
@ -67,7 +67,7 @@ func TestGrokParseLogFiles(t *testing.T) {
|
|||
},
|
||||
map[string]string{
|
||||
"response_code": "200",
|
||||
"path": thisdir + "grok/testdata/test_a.log",
|
||||
"path": thisdir + "testdata/test_a.log",
|
||||
})
|
||||
|
||||
acc.AssertContainsTaggedFields(t, "logparser_grok",
|
||||
|
@ -77,7 +77,7 @@ func TestGrokParseLogFiles(t *testing.T) {
|
|||
"nomodifier": "nomodifier",
|
||||
},
|
||||
map[string]string{
|
||||
"path": thisdir + "grok/testdata/test_b.log",
|
||||
"path": thisdir + "testdata/test_b.log",
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -94,7 +94,7 @@ func TestGrokParseLogFilesAppearLater(t *testing.T) {
|
|||
GrokConfig: GrokConfig{
|
||||
MeasurementName: "logparser_grok",
|
||||
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
|
||||
CustomPatternFiles: []string{thisdir + "grok/testdata/test-patterns"},
|
||||
CustomPatternFiles: []string{thisdir + "testdata/test-patterns"},
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -103,7 +103,7 @@ func TestGrokParseLogFilesAppearLater(t *testing.T) {
|
|||
|
||||
assert.Equal(t, acc.NFields(), 0)
|
||||
|
||||
_ = os.Symlink(thisdir+"grok/testdata/test_a.log", emptydir+"/test_a.log")
|
||||
_ = os.Symlink(thisdir+"testdata/test_a.log", emptydir+"/test_a.log")
|
||||
assert.NoError(t, acc.GatherError(logparser.Gather))
|
||||
acc.Wait(1)
|
||||
|
||||
|
@ -129,11 +129,11 @@ func TestGrokParseLogFilesOneBad(t *testing.T) {
|
|||
|
||||
logparser := &LogParserPlugin{
|
||||
FromBeginning: true,
|
||||
Files: []string{thisdir + "grok/testdata/test_a.log"},
|
||||
Files: []string{thisdir + "testdata/test_a.log"},
|
||||
GrokConfig: GrokConfig{
|
||||
MeasurementName: "logparser_grok",
|
||||
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_BAD}"},
|
||||
CustomPatternFiles: []string{thisdir + "grok/testdata/test-patterns"},
|
||||
CustomPatternFiles: []string{thisdir + "testdata/test-patterns"},
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -153,7 +153,7 @@ func TestGrokParseLogFilesOneBad(t *testing.T) {
|
|||
},
|
||||
map[string]string{
|
||||
"response_code": "200",
|
||||
"path": thisdir + "grok/testdata/test_a.log",
|
||||
"path": thisdir + "testdata/test_a.log",
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
## If no port is specified, 6379 is used
|
||||
servers = ["tcp://localhost:6379"]
|
||||
|
||||
## specify server password
|
||||
# password = "s#cr@t%"
|
||||
|
||||
## Optional TLS Config
|
||||
# tls_ca = "/etc/telegraf/ca.pem"
|
||||
# tls_cert = "/etc/telegraf/cert.pem"
|
||||
|
|
|
@ -174,7 +174,7 @@ if any of the combinations of ObjectName/Instances/Counters are invalid.
|
|||
|
||||
### Generic Queries
|
||||
```
|
||||
|
||||
[[inputs.win_perf_counters]]
|
||||
[[inputs.win_perf_counters.object]]
|
||||
# Processor usage, alternative to native, reports on a per core.
|
||||
ObjectName = "Processor"
|
||||
|
@ -218,6 +218,9 @@ if any of the combinations of ObjectName/Instances/Counters are invalid.
|
|||
|
||||
### Active Directory Domain Controller
|
||||
```
|
||||
[[inputs.win_perf_counters]]
|
||||
[inputs.win_perf_counters.tags]
|
||||
monitorgroup = "ActiveDirectory"
|
||||
[[inputs.win_perf_counters.object]]
|
||||
ObjectName = "DirectoryServices"
|
||||
Instances = ["*"]
|
||||
|
@ -243,6 +246,7 @@ if any of the combinations of ObjectName/Instances/Counters are invalid.
|
|||
|
||||
### DFS Namespace + Domain Controllers
|
||||
```
|
||||
[[inputs.win_perf_counters]]
|
||||
[[inputs.win_perf_counters.object]]
|
||||
# AD, DFS N, Useful if the server hosts a DFS Namespace or is a Domain Controller
|
||||
ObjectName = "DFS Namespace Service Referrals"
|
||||
|
@ -253,9 +257,9 @@ if any of the combinations of ObjectName/Instances/Counters are invalid.
|
|||
#WarnOnMissing = false # Print out when the performance counter is missing, either of object, counter or instance.
|
||||
```
|
||||
|
||||
|
||||
### DFS Replication + Domain Controllers
|
||||
```
|
||||
[[inputs.win_perf_counters]]
|
||||
[[inputs.win_perf_counters.object]]
|
||||
# AD, DFS R, Useful if the server hosts a DFS Replication folder or is a Domain Controller
|
||||
ObjectName = "DFS Replication Service Volumes"
|
||||
|
@ -266,9 +270,9 @@ if any of the combinations of ObjectName/Instances/Counters are invalid.
|
|||
#WarnOnMissing = false # Print out when the performance counter is missing, either of object, counter or instance.
|
||||
```
|
||||
|
||||
|
||||
### DNS Server + Domain Controllers
|
||||
```
|
||||
[[inputs.win_perf_counters]]
|
||||
[[inputs.win_perf_counters.object]]
|
||||
ObjectName = "DNS"
|
||||
Counters = ["Dynamic Update Received","Dynamic Update Rejected","Recursive Queries","Recursive Queries Failure","Secure Update Failure","Secure Update Received","TCP Query Received","TCP Response Sent","UDP Query Received","UDP Response Sent","Total Query Received","Total Response Sent"]
|
||||
|
@ -279,6 +283,7 @@ if any of the combinations of ObjectName/Instances/Counters are invalid.
|
|||
|
||||
### IIS / ASP.NET
|
||||
```
|
||||
[[inputs.win_perf_counters]]
|
||||
[[inputs.win_perf_counters.object]]
|
||||
# HTTP Service request queues in the Kernel before being handed over to User Mode.
|
||||
ObjectName = "HTTP Service Request Queues"
|
||||
|
@ -320,9 +325,9 @@ if any of the combinations of ObjectName/Instances/Counters are invalid.
|
|||
#IncludeTotal=false #Set to true to include _Total instance when querying for all (*).
|
||||
```
|
||||
|
||||
|
||||
### Process
|
||||
```
|
||||
[[inputs.win_perf_counters]]
|
||||
[[inputs.win_perf_counters.object]]
|
||||
# Process metrics, in this case for IIS only
|
||||
ObjectName = "Process"
|
||||
|
@ -332,9 +337,9 @@ if any of the combinations of ObjectName/Instances/Counters are invalid.
|
|||
#IncludeTotal=false #Set to true to include _Total instance when querying for all (*).
|
||||
```
|
||||
|
||||
|
||||
### .NET Monitoring
|
||||
```
|
||||
[[inputs.win_perf_counters]]
|
||||
[[inputs.win_perf_counters.object]]
|
||||
# .NET CLR Exceptions, in this case for IIS only
|
||||
ObjectName = ".NET CLR Exceptions"
|
||||
|
|
|
@ -20,6 +20,7 @@ line and the `semantic_name` is used to name the field or tag. The extension
|
|||
other special handling.
|
||||
|
||||
By default all named captures are converted into string fields.
|
||||
If a pattern does not have a semantic name it will not be captured.
|
||||
Timestamp modifiers can be used to convert captures to the timestamp of the
|
||||
parsed metric. If no timestamp is parsed the metric will be created using the
|
||||
current time.
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
package grok
|
||||
|
||||
// DEFAULT_PATTERNS SHOULD BE KEPT IN-SYNC WITH patterns/influx-patterns
|
||||
const DEFAULT_PATTERNS = `
|
||||
# Captures are a slightly modified version of logstash "grok" patterns, with
|
||||
# the format %{<capture syntax>[:<semantic name>][:<modifier>]}
|
||||
# By default all named captures are converted into string fields.
|
||||
# If a pattern does not have a semantic name it will not be captured.
|
||||
# Modifiers can be used to convert captures to other types or tags.
|
||||
# Timestamp modifiers can be used to convert captures to the timestamp of the
|
||||
# parsed metric.
|
||||
|
|
|
@ -43,7 +43,10 @@ ignored unless specified in the `tag_key` or `json_string_fields` options.
|
|||
|
||||
## Time format is the time layout that should be used to interpret the
|
||||
## json_time_key. The time must be `unix`, `unix_ms` or a time in the
|
||||
## "reference time".
|
||||
## "reference time". To define a different format, arrange the values from
|
||||
## the "reference time" in the example to match the format you will be
|
||||
## using. For more information on the "reference time", visit
|
||||
## https://golang.org/pkg/time/#Time.Format
|
||||
## ex: json_time_format = "Mon Jan 2 15:04:05 -0700 MST 2006"
|
||||
## json_time_format = "2006-01-02T15:04:05Z07:00"
|
||||
## json_time_format = "unix"
|
||||
|
|
|
@ -19,7 +19,7 @@ source field is overwritten.
|
|||
|
||||
## Destination field to be used for the mapped value. By default the source
|
||||
## field is used, overwriting the original value.
|
||||
# dest = "status_code"
|
||||
dest = "status_code"
|
||||
|
||||
## Default value to be used for all values not contained in the mapping
|
||||
## table. When unset, the unmodified value for the field will be used if no
|
||||
|
|
Loading…
Reference in New Issue