Use a bufio.Scanner in http listener

this will prevent potentially very large allocations caused by a very large
chunk size sent by a client.

fixes #1823
This commit is contained in:
Cameron Sparr 2016-09-29 12:09:02 +01:00
parent ca8e512e5b
commit 78ced6bc30
3 changed files with 42 additions and 16 deletions

View File

@ -41,6 +41,7 @@
- [#1137](https://github.com/influxdata/telegraf/issues/1137): Fix issue loading config directory on windows. - [#1137](https://github.com/influxdata/telegraf/issues/1137): Fix issue loading config directory on windows.
- [#1772](https://github.com/influxdata/telegraf/pull/1772): Windows remote management interactive service fix. - [#1772](https://github.com/influxdata/telegraf/pull/1772): Windows remote management interactive service fix.
- [#1702](https://github.com/influxdata/telegraf/issues/1702): sqlserver, fix issue when case sensitive collation is activated. - [#1702](https://github.com/influxdata/telegraf/issues/1702): sqlserver, fix issue when case sensitive collation is activated.
- [#1823](https://github.com/influxdata/telegraf/issues/1823): Fix huge allocations in http_listener when dealing with huge payloads.
## v1.0.1 [unreleased] ## v1.0.1 [unreleased]

View File

@ -1,7 +1,9 @@
package http_listener package http_listener
import ( import (
"io/ioutil" "bufio"
"bytes"
"fmt"
"log" "log"
"net" "net"
"net/http" "net/http"
@ -111,25 +113,34 @@ func (t *HttpListener) httpListen() error {
func (t *HttpListener) ServeHTTP(res http.ResponseWriter, req *http.Request) { func (t *HttpListener) ServeHTTP(res http.ResponseWriter, req *http.Request) {
t.wg.Add(1) t.wg.Add(1)
defer t.wg.Done() defer t.wg.Done()
body, err := ioutil.ReadAll(req.Body)
if err != nil {
log.Printf("Problem reading request: [%s], Error: %s\n", string(body), err)
http.Error(res, "ERROR reading request", http.StatusInternalServerError)
return
}
switch req.URL.Path { switch req.URL.Path {
case "/write": case "/write":
var metrics []telegraf.Metric var http400msg bytes.Buffer
metrics, err = t.parser.Parse(body) var partial string
if err == nil { scanner := bufio.NewScanner(req.Body)
for _, m := range metrics { scanner.Buffer([]byte(""), 128*1024)
t.acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time()) for scanner.Scan() {
metrics, err := t.parser.Parse(scanner.Bytes())
if err == nil {
for _, m := range metrics {
t.acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time())
}
partial = "partial write: "
} else {
http400msg.WriteString(err.Error() + " ")
} }
res.WriteHeader(http.StatusNoContent) }
if err := scanner.Err(); err != nil {
http.Error(res, "Internal server error: "+err.Error(), http.StatusInternalServerError)
} else if http400msg.Len() > 0 {
res.Header().Set("Content-Type", "application/json")
res.Header().Set("X-Influxdb-Version", "1.0")
res.WriteHeader(http.StatusBadRequest)
res.Write([]byte(fmt.Sprintf(`{"error":"%s%s"}`, partial, http400msg.String())))
} else { } else {
log.Printf("Problem parsing body: [%s], Error: %s\n", string(body), err) res.WriteHeader(http.StatusNoContent)
http.Error(res, "ERROR parsing metrics", http.StatusInternalServerError)
} }
case "/query": case "/query":
// Deliver a dummy response to the query endpoint, as some InfluxDB // Deliver a dummy response to the query endpoint, as some InfluxDB

File diff suppressed because one or more lines are too long