From af6e7b95318613662791922bec2c4c5b1969ad51 Mon Sep 17 00:00:00 2001
From: Cameron Sparr <cameronsparr@gmail.com>
Date: Thu, 1 Dec 2016 15:18:46 +0000
Subject: [PATCH] More unit tests for new metric

---
 metric.go                             |   4 +-
 metric/escape.go                      |  49 ++++
 metric/inline_strconv_parse_test.go   | 103 +++++++
 metric/metric.go                      | 131 ++++-----
 metric/metric_benchmark_test.go       |  14 +
 metric/metric_test.go                 | 393 ++++++++++++++++++++++++++
 metric/parse.go                       |  43 +--
 metric/parse_test.go                  | 355 +++++++++++++++++++++++
 plugins/serializers/json/json_test.go |  29 +-
 9 files changed, 1007 insertions(+), 114 deletions(-)
 create mode 100644 metric/escape.go
 create mode 100644 metric/inline_strconv_parse_test.go
 create mode 100644 metric/parse_test.go

diff --git a/metric.go b/metric.go
index 510b4235..a29a6334 100644
--- a/metric.go
+++ b/metric.go
@@ -26,12 +26,12 @@ type Metric interface {
 	// Tag functions
 	HasTag(key string) bool
 	AddTag(key, value string)
-	RemoveTag(key string) bool
+	RemoveTag(key string)
 
 	// Field functions
 	HasField(key string) bool
 	AddField(key string, value interface{})
-	RemoveField(key string) bool
+	RemoveField(key string) error
 
 	// Name functions
 	SetName(name string)
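Note on the interface change above: RemoveTag no longer reports whether the tag existed, and RemoveField now returns an error instead of a bool. A minimal caller-side sketch against the patched API (the values and log handling are illustrative, not part of this patch):

package main

import (
	"log"
	"time"

	"github.com/influxdata/telegraf/metric"
)

func main() {
	m, err := metric.New(
		"cpu",
		map[string]string{"host": "localhost"},
		map[string]interface{}{"usage_idle": float64(99), "usage_user": float64(1)},
		time.Now(),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Removing a tag is now fire-and-forget; a missing key is simply a no-op.
	m.RemoveTag("host")

	// Removing a field can fail, e.g. when it would leave the metric with no
	// fields at all, so the returned error should be checked.
	if err := m.RemoveField("usage_user"); err != nil {
		log.Printf("could not remove field: %v", err)
	}
}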
diff --git a/metric/escape.go b/metric/escape.go
new file mode 100644
index 00000000..a717f514
--- /dev/null
+++ b/metric/escape.go
@@ -0,0 +1,49 @@
+package metric
+
+import (
+	"strings"
+)
+
+var (
+	// escaper is for escaping:
+	//   - tag keys
+	//   - tag values
+	//   - field keys
+	// see https://docs.influxdata.com/influxdb/v1.0/write_protocols/line_protocol_tutorial/#special-characters-and-keywords
+	escaper   = strings.NewReplacer(`,`, `\,`, `"`, `\"`, ` `, `\ `, `=`, `\=`)
+	unEscaper = strings.NewReplacer(`\,`, `,`, `\"`, `"`, `\ `, ` `, `\=`, `=`)
+
+	// nameEscaper is for escaping measurement names only.
+	// see https://docs.influxdata.com/influxdb/v1.0/write_protocols/line_protocol_tutorial/#special-characters-and-keywords
+	nameEscaper   = strings.NewReplacer(`,`, `\,`, ` `, `\ `)
+	nameUnEscaper = strings.NewReplacer(`\,`, `,`, `\ `, ` `)
+
+	// stringFieldEscaper is for escaping string field values only.
+	// see https://docs.influxdata.com/influxdb/v1.0/write_protocols/line_protocol_tutorial/#special-characters-and-keywords
+	stringFieldEscaper   = strings.NewReplacer(`"`, `\"`)
+	stringFieldUnEscaper = strings.NewReplacer(`\"`, `"`)
+)
+
+func escape(s string, t string) string {
+	switch t {
+	case "fieldkey", "tagkey", "tagval":
+		return escaper.Replace(s)
+	case "name":
+		return nameEscaper.Replace(s)
+	case "fieldval":
+		return stringFieldEscaper.Replace(s)
+	}
+	return s
+}
+
+func unescape(s string, t string) string {
+	switch t {
+	case "fieldkey", "tagkey", "tagval":
+		return unEscaper.Replace(s)
+	case "name":
+		return nameUnEscaper.Replace(s)
+	case "fieldval":
+		return stringFieldUnEscaper.Replace(s)
+	}
+	return s
+}
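The replacers above encode the line-protocol escaping rules per component type, and unescape is their inverse used when decoding. A small sketch of the intended behaviour, written as a test inside this package since the helpers are unexported (assumes the standard testing import; the expected strings follow directly from the replacers above):

func TestEscapeRoundTrip(t *testing.T) {
	// tag keys, tag values and field keys escape commas, quotes, spaces and '='
	if got := escape(`us,midwest`, "tagval"); got != `us\,midwest` {
		t.Fatalf("tagval escape: got %q", got)
	}
	// measurement names only escape commas and spaces
	if got := escape(`my cpu`, "name"); got != `my\ cpu` {
		t.Fatalf("name escape: got %q", got)
	}
	// string field values only escape double quotes
	if got := escape(`too"hot"`, "fieldval"); got != `too\"hot\"` {
		t.Fatalf("fieldval escape: got %q", got)
	}
	// unescape should invert escape for every component type
	for _, typ := range []string{"fieldkey", "tagkey", "tagval", "name", "fieldval"} {
		in := `a b,c=d`
		if got := unescape(escape(in, typ), typ); got != in {
			t.Fatalf("round trip failed for %s: got %q", typ, got)
		}
	}
}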
diff --git a/metric/inline_strconv_parse_test.go b/metric/inline_strconv_parse_test.go
new file mode 100644
index 00000000..2998d4c2
--- /dev/null
+++ b/metric/inline_strconv_parse_test.go
@@ -0,0 +1,103 @@
+package metric
+
+import (
+	"strconv"
+	"testing"
+	"testing/quick"
+)
+
+func TestParseIntBytesEquivalenceFuzz(t *testing.T) {
+	f := func(b []byte, base int, bitSize int) bool {
+		exp, expErr := strconv.ParseInt(string(b), base, bitSize)
+		got, gotErr := parseIntBytes(b, base, bitSize)
+
+		return exp == got && checkErrs(expErr, gotErr)
+	}
+
+	cfg := &quick.Config{
+		MaxCount: 10000,
+	}
+
+	if err := quick.Check(f, cfg); err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestParseIntBytesValid64bitBase10EquivalenceFuzz(t *testing.T) {
+	buf := []byte{}
+	f := func(n int64) bool {
+		buf = strconv.AppendInt(buf[:0], n, 10)
+
+		exp, expErr := strconv.ParseInt(string(buf), 10, 64)
+		got, gotErr := parseIntBytes(buf, 10, 64)
+
+		return exp == got && checkErrs(expErr, gotErr)
+	}
+
+	cfg := &quick.Config{
+		MaxCount: 10000,
+	}
+
+	if err := quick.Check(f, cfg); err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestParseFloatBytesEquivalenceFuzz(t *testing.T) {
+	f := func(b []byte, bitSize int) bool {
+		exp, expErr := strconv.ParseFloat(string(b), bitSize)
+		got, gotErr := parseFloatBytes(b, bitSize)
+
+		return exp == got && checkErrs(expErr, gotErr)
+	}
+
+	cfg := &quick.Config{
+		MaxCount: 10000,
+	}
+
+	if err := quick.Check(f, cfg); err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestParseFloatBytesValid64bitEquivalenceFuzz(t *testing.T) {
+	buf := []byte{}
+	f := func(n float64) bool {
+		buf = strconv.AppendFloat(buf[:0], n, 'f', -1, 64)
+
+		exp, expErr := strconv.ParseFloat(string(buf), 64)
+		got, gotErr := parseFloatBytes(buf, 64)
+
+		return exp == got && checkErrs(expErr, gotErr)
+	}
+
+	cfg := &quick.Config{
+		MaxCount: 10000,
+	}
+
+	if err := quick.Check(f, cfg); err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestParseBoolBytesEquivalence(t *testing.T) {
+	var buf []byte
+	for _, s := range []string{"1", "t", "T", "TRUE", "true", "True", "0", "f", "F", "FALSE", "false", "False", "fail", "TrUe", "FAlSE", "numbers", ""} {
+		buf = append(buf[:0], s...)
+
+		exp, expErr := strconv.ParseBool(s)
+		got, gotErr := parseBoolBytes(buf)
+
+		if got != exp || !checkErrs(expErr, gotErr) {
+			t.Errorf("Failed to parse boolean value %q correctly: wanted (%t, %v), got (%t, %v)", s, exp, expErr, got, gotErr)
+		}
+	}
+}
+
+func checkErrs(a, b error) bool {
+	if (a == nil) != (b == nil) {
+		return false
+	}
+
+	return a == nil || a.Error() == b.Error()
+}
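These fuzz tests treat strconv as the reference implementation, and checkErrs only requires that both sides agree on whether an error occurred and on its message. A couple of direct spot checks in the same spirit, with signatures taken from the calls above and expected values following from that equivalence:

func TestParseBytesSpotChecks(t *testing.T) {
	if n, err := parseIntBytes([]byte("42"), 10, 64); err != nil || n != 42 {
		t.Fatalf("parseIntBytes: got (%d, %v)", n, err)
	}
	if f, err := parseFloatBytes([]byte("99.5"), 64); err != nil || f != 99.5 {
		t.Fatalf("parseFloatBytes: got (%g, %v)", f, err)
	}
	if b, err := parseBoolBytes([]byte("true")); err != nil || !b {
		t.Fatalf("parseBoolBytes: got (%t, %v)", b, err)
	}

	// failures should mirror strconv's errors exactly
	_, expErr := strconv.ParseInt("notanumber", 10, 64)
	_, gotErr := parseIntBytes([]byte("notanumber"), 10, 64)
	if !checkErrs(expErr, gotErr) {
		t.Fatalf("error mismatch: %v vs %v", expErr, gotErr)
	}
}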
diff --git a/metric/metric.go b/metric/metric.go
index 6e369b0e..4514d8ec 100644
--- a/metric/metric.go
+++ b/metric/metric.go
@@ -6,7 +6,6 @@ import (
 	"hash/fnv"
 	"sort"
 	"strconv"
-	"strings"
 	"time"
 
 	"github.com/influxdata/telegraf"
@@ -17,23 +16,6 @@ import (
 
 const MaxInt = int(^uint(0) >> 1)
 
-var (
-	// escaper is for escaping:
-	//   - tag keys
-	//   - tag values
-	//   - field keys
-	// see https://docs.influxdata.com/influxdb/v1.0/write_protocols/line_protocol_tutorial/#special-characters-and-keywords
-	escaper = strings.NewReplacer(`,`, `\,`, `"`, `\"`, ` `, `\ `, `=`, `\=`)
-
-	// nameEscaper is for escaping measurement names only.
-	// see https://docs.influxdata.com/influxdb/v1.0/write_protocols/line_protocol_tutorial/#special-characters-and-keywords
-	nameEscaper = strings.NewReplacer(`,`, `\,`, ` `, `\ `)
-
-	// stringFieldEscaper is for escaping string field values only.
-	// see https://docs.influxdata.com/influxdb/v1.0/write_protocols/line_protocol_tutorial/#special-characters-and-keywords
-	stringFieldEscaper = strings.NewReplacer(`"`, `\"`)
-)
-
 func New(
 	name string,
 	tags map[string]string,
@@ -44,6 +26,9 @@ func New(
 	if len(fields) == 0 {
 		return nil, fmt.Errorf("Metric cannot be made without any fields")
 	}
+	if len(name) == 0 {
+		return nil, fmt.Errorf("Metric cannot be made with an empty name")
+	}
 
 	var thisType telegraf.ValueType
 	if len(mType) > 0 {
@@ -53,7 +38,7 @@ func New(
 	}
 
 	m := &metric{
-		name:  []byte(nameEscaper.Replace(name)),
+		name:  []byte(escape(name, "name")),
 		t:     []byte(fmt.Sprint(t.UnixNano())),
 		nsec:  t.UnixNano(),
 		mType: thisType,
@@ -62,7 +47,8 @@ func New(
 	// pre-allocate exact size of the tags slice
 	taglen := 0
 	for k, v := range tags {
-		taglen += 2 + len(escaper.Replace(k)) + len(escaper.Replace(v))
+		// TODO check that length of tag key & value are > 0
+		taglen += 2 + len(escape(k, "tagkey")) + len(escape(v, "tagval"))
 	}
 	m.tags = make([]byte, taglen)
 
@@ -70,10 +56,10 @@ func New(
 	for k, v := range tags {
 		m.tags[i] = ','
 		i++
-		i += copy(m.tags[i:], escaper.Replace(k))
+		i += copy(m.tags[i:], escape(k, "tagkey"))
 		m.tags[i] = '='
 		i++
-		i += copy(m.tags[i:], escaper.Replace(v))
+		i += copy(m.tags[i:], escape(v, "tagval"))
 	}
 
 	// pre-allocate capacity of the fields slice
@@ -147,10 +133,8 @@ type metric struct {
 	aggregate bool
 
 	// cached values for reuse in "get" functions
-	hashID   uint64
-	nsec     int64
-	fieldMap map[string]interface{}
-	tagMap   map[string]string
+	hashID uint64
+	nsec   int64
 }
 
 func (m *metric) Point() *client.Point {
@@ -195,12 +179,7 @@ func (m *metric) Serialize() []byte {
 }
 
 func (m *metric) Fields() map[string]interface{} {
-	if m.fieldMap != nil {
-		// TODO should we return a copy?
-		return m.fieldMap
-	}
-
-	m.fieldMap = map[string]interface{}{}
+	fieldMap := map[string]interface{}{}
 	i := 0
 	for {
 		if i >= len(m.fields) {
@@ -232,31 +211,31 @@ func (m *metric) Fields() map[string]interface{} {
 		switch m.fields[i:][i2] {
 		case '"':
 			// string field
-			m.fieldMap[string(m.fields[i:][0:i1])] = string(m.fields[i:][i2+1 : i3-1])
+			fieldMap[unescape(string(m.fields[i:][0:i1]), "fieldkey")] = unescape(string(m.fields[i:][i2+1:i3-1]), "fieldval")
 		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
 			// number field
 			switch m.fields[i:][i3-1] {
 			case 'i':
 				// integer field
-				n, err := strconv.ParseInt(string(m.fields[i:][i2:i3-1]), 10, 64)
+				n, err := parseIntBytes(m.fields[i:][i2:i3-1], 10, 64)
 				if err == nil {
-					m.fieldMap[string(m.fields[i:][0:i1])] = n
+					fieldMap[unescape(string(m.fields[i:][0:i1]), "fieldkey")] = n
 				} else {
 					// TODO handle error or just ignore field silently?
 				}
 			default:
 				// float field
-				n, err := strconv.ParseFloat(string(m.fields[i:][i2:i3]), 64)
+				n, err := parseFloatBytes(m.fields[i:][i2:i3], 64)
 				if err == nil {
-					m.fieldMap[string(m.fields[i:][0:i1])] = n
+					fieldMap[unescape(string(m.fields[i:][0:i1]), "fieldkey")] = n
 				} else {
 					// TODO handle error or just ignore field silently?
 				}
 			}
 		case 'T', 't':
-			m.fieldMap[string(m.fields[i:][0:i1])] = true
+			fieldMap[unescape(string(m.fields[i:][0:i1]), "fieldkey")] = true
 		case 'F', 'f':
-			m.fieldMap[string(m.fields[i:][0:i1])] = false
+			fieldMap[unescape(string(m.fields[i:][0:i1]), "fieldkey")] = false
 		default:
 			// TODO handle unsupported field type
 		}
@@ -264,18 +243,13 @@ func (m *metric) Fields() map[string]interface{} {
 		i += i3 + 1
 	}
 
-	return m.fieldMap
+	return fieldMap
 }
 
 func (m *metric) Tags() map[string]string {
-	if m.tagMap != nil {
-		// TODO should we return a copy?
-		return m.tagMap
-	}
-
-	m.tagMap = map[string]string{}
+	tagMap := map[string]string{}
 	if len(m.tags) == 0 {
-		return m.tagMap
+		return tagMap
 	}
 
 	i := 0
@@ -293,25 +267,25 @@ func (m *metric) Tags() map[string]string {
 		// end index of tag value (starting from i2)
 		i3 := indexUnescapedByte(m.tags[i+i2:], ',')
 		if i3 == -1 {
-			m.tagMap[string(m.tags[i:][i0:i1])] = string(m.tags[i:][i2:])
+			tagMap[unescape(string(m.tags[i:][i0:i1]), "tagkey")] = unescape(string(m.tags[i:][i2:]), "tagval")
 			break
 		}
-		m.tagMap[string(m.tags[i:][i0:i1])] = string(m.tags[i:][i2 : i2+i3])
+		tagMap[unescape(string(m.tags[i:][i0:i1]), "tagkey")] = unescape(string(m.tags[i:][i2:i2+i3]), "tagval")
 		// increment start index for the next tag
 		i += i2 + i3
 	}
 
-	return m.tagMap
+	return tagMap
 }
 
 func (m *metric) Name() string {
-	return string(m.name)
+	return unescape(string(m.name), "name")
 }
 
 func (m *metric) Time() time.Time {
 	// assume metric has been verified already and ignore error:
 	if m.nsec == 0 {
-		m.nsec, _ = strconv.ParseInt(string(m.t), 10, 64)
+		m.nsec, _ = parseIntBytes(m.t, 10, 64)
 	}
 	return time.Unix(0, m.nsec)
 }
@@ -319,7 +293,7 @@ func (m *metric) Time() time.Time {
 func (m *metric) UnixNano() int64 {
 	// assume metric has been verified already and ignore error:
 	if m.nsec == 0 {
-		m.nsec, _ = strconv.ParseInt(string(m.t), 10, 64)
+		m.nsec, _ = parseIntBytes(m.t, 10, 64)
 	}
 	return m.nsec
 }
@@ -328,10 +302,12 @@ func (m *metric) SetName(name string) {
 	m.hashID = 0
 	m.name = []byte(nameEscaper.Replace(name))
 }
+
 func (m *metric) SetPrefix(prefix string) {
 	m.hashID = 0
 	m.name = append([]byte(nameEscaper.Replace(prefix)), m.name...)
 }
+
 func (m *metric) SetSuffix(suffix string) {
 	m.hashID = 0
 	m.name = append(m.name, []byte(nameEscaper.Replace(suffix))...)
@@ -339,24 +315,23 @@ func (m *metric) SetSuffix(suffix string) {
 
 func (m *metric) AddTag(key, value string) {
 	m.RemoveTag(key)
-	m.tags = append(m.tags, []byte(","+escaper.Replace(key)+"="+escaper.Replace(value))...)
+	m.tags = append(m.tags, []byte(","+escape(key, "tagkey")+"="+escape(value, "tagval"))...)
 }
 
 func (m *metric) HasTag(key string) bool {
-	i := bytes.Index(m.tags, []byte(escaper.Replace(key)+"="))
+	i := bytes.Index(m.tags, []byte(escape(key, "tagkey")+"="))
 	if i == -1 {
 		return false
 	}
 	return true
 }
 
-func (m *metric) RemoveTag(key string) bool {
-	m.tagMap = nil
+func (m *metric) RemoveTag(key string) {
 	m.hashID = 0
 
-	i := bytes.Index(m.tags, []byte(escaper.Replace(key)+"="))
+	i := bytes.Index(m.tags, []byte(escape(key, "tagkey")+"="))
 	if i == -1 {
-		return false
+		return
 	}
 
 	tmp := m.tags[0 : i-1]
@@ -365,38 +340,43 @@ func (m *metric) RemoveTag(key string) bool {
 		tmp = append(tmp, m.tags[i+j:]...)
 	}
 	m.tags = tmp
-	return true
+	return
 }
 
 func (m *metric) AddField(key string, value interface{}) {
-	m.fieldMap = nil
 	m.fields = append(m.fields, ',')
-	appendField(m.fields, key, value)
+	m.fields = appendField(m.fields, key, value)
 }
 
 func (m *metric) HasField(key string) bool {
-	i := bytes.Index(m.fields, []byte(escaper.Replace(key)+"="))
+	i := bytes.Index(m.fields, []byte(escape(key, "fieldkey")+"="))
 	if i == -1 {
 		return false
 	}
 	return true
 }
 
-func (m *metric) RemoveField(key string) bool {
-	m.fieldMap = nil
-	m.hashID = 0
-	i := bytes.Index(m.fields, []byte(escaper.Replace(key)+"="))
+func (m *metric) RemoveField(key string) error {
+	i := bytes.Index(m.fields, []byte(escape(key, "fieldkey")+"="))
 	if i == -1 {
-		return false
+		return nil
 	}
 
-	tmp := m.fields[0 : i-1]
+	var tmp []byte
+	if i != 0 {
+		tmp = m.fields[0 : i-1]
+	}
 	j := indexUnescapedByte(m.fields[i:], ',')
 	if j != -1 {
 		tmp = append(tmp, m.fields[i+j:]...)
 	}
+
+	if len(tmp) == 0 {
+		return fmt.Errorf("Metric cannot remove final field: %s", m.fields)
+	}
+
 	m.fields = tmp
-	return true
+	return nil
 }
 
 func (m *metric) Copy() telegraf.Metric {
@@ -437,7 +417,10 @@ func (m *metric) HashID() uint64 {
 }
 
 func appendField(b []byte, k string, v interface{}) []byte {
-	b = append(b, []byte(escaper.Replace(k)+"=")...)
+	if v == nil {
+		return b
+	}
+	b = append(b, []byte(escape(k, "fieldkey")+"=")...)
 
 	// check popular types first
 	switch v := v.(type) {
@@ -448,7 +431,7 @@ func appendField(b []byte, k string, v interface{}) []byte {
 		b = append(b, 'i')
 	case string:
 		b = append(b, '"')
-		b = append(b, []byte(stringFieldEscaper.Replace(v))...)
+		b = append(b, []byte(escape(v, "fieldval"))...)
 		b = append(b, '"')
 	case bool:
 		b = strconv.AppendBool(b, v)
@@ -497,12 +480,10 @@ func appendField(b []byte, k string, v interface{}) []byte {
 		b = strconv.AppendFloat(b, float64(v), 'f', -1, 32)
 	case []byte:
 		b = append(b, v...)
-	case nil:
-		// skip
 	default:
 		// Can't determine the type, so convert to string
 		b = append(b, '"')
-		b = append(b, []byte(stringFieldEscaper.Replace(fmt.Sprintf("%v", v)))...)
+		b = append(b, []byte(escape(fmt.Sprintf("%v", v), "fieldval"))...)
 		b = append(b, '"')
 	}
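With the fieldMap/tagMap caches removed above, Fields() and Tags() re-decode fresh maps from the raw line-protocol bytes on every call. A sketch of the layout those decoders walk, in the same shape the benchmark file below constructs by hand (the function name is illustrative and the expected maps follow from the decoding code above):

// Raw layout held by the struct:
//   name:   escaped measurement name            "cpu"
//   tags:   zero or more ",key=value" sections  ",host=localhost,dc=us-east-1"
//   fields: comma-separated key=value pairs     `usage=99,active=true,note="idle"`
//   t:      nanosecond timestamp as ASCII       "1480614053000000000"
func exampleMetricLayout() {
	m := &metric{
		name:   []byte("cpu"),
		tags:   []byte(",host=localhost,dc=us-east-1"),
		fields: []byte(`usage=99,active=true,note="idle"`),
		t:      []byte("1480614053000000000"),
	}
	// Decoded on demand, a new map each call:
	_ = m.Fields() // map[string]interface{}{"usage": float64(99), "active": true, "note": "idle"}
	_ = m.Tags()   // map[string]string{"host": "localhost", "dc": "us-east-1"}
}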
 
diff --git a/metric/metric_benchmark_test.go b/metric/metric_benchmark_test.go
index 90c8f001..86906483 100644
--- a/metric/metric_benchmark_test.go
+++ b/metric/metric_benchmark_test.go
@@ -36,6 +36,20 @@ func BenchmarkNewMetric(b *testing.B) {
 	s = string(mt.String())
 }
 
+func BenchmarkAddTag(b *testing.B) {
+	var mt telegraf.Metric
+	mt = &metric{
+		name:   []byte("cpu"),
+		tags:   []byte(",host=localhost"),
+		fields: []byte("a=101"),
+		t:      []byte("1480614053000000000"),
+	}
+	for n := 0; n < b.N; n++ {
+		mt.AddTag("foo", "bar")
+	}
+	s = string(mt.String())
+}
+
 func BenchmarkTags(b *testing.B) {
 	for n := 0; n < b.N; n++ {
 		var mt, _ = New("test_metric",
diff --git a/metric/metric_test.go b/metric/metric_test.go
index f6066973..b373cabd 100644
--- a/metric/metric_test.go
+++ b/metric/metric_test.go
@@ -33,6 +33,363 @@ func TestNewMetric(t *testing.T) {
 	assert.Equal(t, now.UnixNano(), m.UnixNano())
 }
 
+func TestNewErrors(t *testing.T) {
+	// creating a metric with an empty name produces an error:
+	m, err := New(
+		"",
+		map[string]string{
+			"datacenter": "us-east-1",
+			"mytag":      "foo",
+			"another":    "tag",
+		},
+		map[string]interface{}{
+			"value": float64(1),
+		},
+		time.Now(),
+	)
+	assert.Error(t, err)
+	assert.Nil(t, m)
+
+	// creating a metric with empty fields produces an error:
+	m, err = New(
+		"foobar",
+		map[string]string{
+			"datacenter": "us-east-1",
+			"mytag":      "foo",
+			"another":    "tag",
+		},
+		map[string]interface{}{},
+		time.Now(),
+	)
+	assert.Error(t, err)
+	assert.Nil(t, m)
+}
+
+func TestNewMetric_Tags(t *testing.T) {
+	now := time.Now()
+	tags := map[string]string{
+		"host":       "localhost",
+		"datacenter": "us-east-1",
+	}
+	fields := map[string]interface{}{
+		"value": float64(1),
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+
+	assert.True(t, m.HasTag("host"))
+	assert.True(t, m.HasTag("datacenter"))
+
+	m.AddTag("newtag", "foo")
+	assert.True(t, m.HasTag("newtag"))
+
+	m.RemoveTag("host")
+	assert.False(t, m.HasTag("host"))
+	assert.True(t, m.HasTag("newtag"))
+	assert.True(t, m.HasTag("datacenter"))
+
+	m.RemoveTag("datacenter")
+	assert.False(t, m.HasTag("datacenter"))
+	assert.True(t, m.HasTag("newtag"))
+	assert.Equal(t, map[string]string{"newtag": "foo"}, m.Tags())
+
+	m.RemoveTag("newtag")
+	assert.False(t, m.HasTag("newtag"))
+	assert.Equal(t, map[string]string{}, m.Tags())
+
+	assert.Equal(t, "cpu value=1 "+fmt.Sprint(now.UnixNano())+"\n", m.String())
+}
+
+func TestSerialize(t *testing.T) {
+	now := time.Now()
+	tags := map[string]string{
+		"datacenter": "us-east-1",
+	}
+	fields := map[string]interface{}{
+		"value": float64(1),
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+
+	assert.Equal(t,
+		[]byte("cpu,datacenter=us-east-1 value=1 "+fmt.Sprint(now.UnixNano())+"\n"),
+		m.Serialize())
+
+	m.RemoveTag("datacenter")
+	assert.Equal(t,
+		[]byte("cpu value=1 "+fmt.Sprint(now.UnixNano())+"\n"),
+		m.Serialize())
+}
+
+func TestHashID(t *testing.T) {
+	m, _ := New(
+		"cpu",
+		map[string]string{
+			"datacenter": "us-east-1",
+			"mytag":      "foo",
+			"another":    "tag",
+		},
+		map[string]interface{}{
+			"value": float64(1),
+		},
+		time.Now(),
+	)
+	hash := m.HashID()
+
+	// adding a field doesn't change the hash:
+	m.AddField("foo", int64(100))
+	assert.Equal(t, hash, m.HashID())
+
+	// removing a non-existent tag doesn't change the hash:
+	m.RemoveTag("no-op")
+	assert.Equal(t, hash, m.HashID())
+
+	// adding a tag does change it:
+	m.AddTag("foo", "bar")
+	assert.NotEqual(t, hash, m.HashID())
+	hash = m.HashID()
+
+	// removing a tag also changes it:
+	m.RemoveTag("mytag")
+	assert.NotEqual(t, hash, m.HashID())
+}
+
+func TestHashID_Consistency(t *testing.T) {
+	m, _ := New(
+		"cpu",
+		map[string]string{
+			"datacenter": "us-east-1",
+			"mytag":      "foo",
+			"another":    "tag",
+		},
+		map[string]interface{}{
+			"value": float64(1),
+		},
+		time.Now(),
+	)
+	hash := m.HashID()
+
+	for i := 0; i < 1000; i++ {
+		m2, _ := New(
+			"cpu",
+			map[string]string{
+				"datacenter": "us-east-1",
+				"mytag":      "foo",
+				"another":    "tag",
+			},
+			map[string]interface{}{
+				"value": float64(1),
+			},
+			time.Now(),
+		)
+		assert.Equal(t, hash, m2.HashID())
+	}
+}
+
+func TestNewMetric_NameModifiers(t *testing.T) {
+	now := time.Now()
+	tags := map[string]string{}
+	fields := map[string]interface{}{
+		"value": float64(1),
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+
+	hash := m.HashID()
+	suffix := fmt.Sprintf(" value=1 %d\n", now.UnixNano())
+	assert.Equal(t, "cpu"+suffix, m.String())
+
+	m.SetPrefix("pre_")
+	assert.NotEqual(t, hash, m.HashID())
+	hash = m.HashID()
+	assert.Equal(t, "pre_cpu"+suffix, m.String())
+
+	m.SetSuffix("_post")
+	assert.NotEqual(t, hash, m.HashID())
+	hash = m.HashID()
+	assert.Equal(t, "pre_cpu_post"+suffix, m.String())
+
+	m.SetName("mem")
+	assert.NotEqual(t, hash, m.HashID())
+	assert.Equal(t, "mem"+suffix, m.String())
+}
+
+func TestNewMetric_FieldModifiers(t *testing.T) {
+	now := time.Now()
+	tags := map[string]string{
+		"host": "localhost",
+	}
+	fields := map[string]interface{}{
+		"value": float64(1),
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+
+	assert.True(t, m.HasField("value"))
+	assert.False(t, m.HasField("foo"))
+
+	m.AddField("newfield", "foo")
+	assert.True(t, m.HasField("newfield"))
+
+	assert.NoError(t, m.RemoveField("newfield"))
+	assert.False(t, m.HasField("newfield"))
+
+	// don't allow user to remove all fields:
+	assert.Error(t, m.RemoveField("value"))
+
+	m.AddField("value2", int64(101))
+	assert.NoError(t, m.RemoveField("value"))
+	assert.False(t, m.HasField("value"))
+}
+
+func TestNewMetric_Fields(t *testing.T) {
+	now := time.Now()
+	tags := map[string]string{
+		"host": "localhost",
+	}
+	fields := map[string]interface{}{
+		"float":  float64(1),
+		"int":    int64(1),
+		"bool":   true,
+		"false":  false,
+		"string": "test",
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+
+	assert.Equal(t, fields, m.Fields())
+}
+
+func TestNewMetric_Time(t *testing.T) {
+	now := time.Now()
+	tags := map[string]string{
+		"host": "localhost",
+	}
+	fields := map[string]interface{}{
+		"float":  float64(1),
+		"int":    int64(1),
+		"bool":   true,
+		"false":  false,
+		"string": "test",
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+	m = m.Copy()
+	m2 := m.Copy()
+
+	assert.Equal(t, now.UnixNano(), m.Time().UnixNano())
+	assert.Equal(t, now.UnixNano(), m2.UnixNano())
+}
+
+func TestNewMetric_Copy(t *testing.T) {
+	now := time.Now()
+	tags := map[string]string{}
+	fields := map[string]interface{}{
+		"float": float64(1),
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+	m2 := m.Copy()
+
+	assert.Equal(t,
+		fmt.Sprintf("cpu float=1 %d\n", now.UnixNano()),
+		m.String())
+	m.AddTag("host", "localhost")
+	assert.Equal(t,
+		fmt.Sprintf("cpu,host=localhost float=1 %d\n", now.UnixNano()),
+		m.String())
+
+	assert.Equal(t,
+		fmt.Sprintf("cpu float=1 %d\n", now.UnixNano()),
+		m2.String())
+}
+
+func TestNewMetric_AllTypes(t *testing.T) {
+	now := time.Now()
+	tags := map[string]string{}
+	fields := map[string]interface{}{
+		"float64":     float64(1),
+		"float32":     float32(1),
+		"int64":       int64(1),
+		"int32":       int32(1),
+		"int16":       int16(1),
+		"int8":        int8(1),
+		"int":         int(1),
+		"uint64":      uint64(1),
+		"uint32":      uint32(1),
+		"uint16":      uint16(1),
+		"uint8":       uint8(1),
+		"uint":        uint(1),
+		"bytes":       []byte("foo"),
+		"nil":         nil,
+		"maxuint64":   uint64(MaxInt) + 10,
+		"maxuint":     uint(MaxInt) + 10,
+		"unsupported": []int{1, 2},
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+
+	assert.Contains(t, m.String(), "float64=1")
+	assert.Contains(t, m.String(), "float32=1")
+	assert.Contains(t, m.String(), "int64=1i")
+	assert.Contains(t, m.String(), "int32=1i")
+	assert.Contains(t, m.String(), "int16=1i")
+	assert.Contains(t, m.String(), "int8=1i")
+	assert.Contains(t, m.String(), "int=1i")
+	assert.Contains(t, m.String(), "uint64=1i")
+	assert.Contains(t, m.String(), "uint32=1i")
+	assert.Contains(t, m.String(), "uint16=1i")
+	assert.Contains(t, m.String(), "uint8=1i")
+	assert.Contains(t, m.String(), "uint=1i")
+	assert.NotContains(t, m.String(), "nil")
+	assert.Contains(t, m.String(), fmt.Sprintf("maxuint64=%di", MaxInt))
+	assert.Contains(t, m.String(), fmt.Sprintf("maxuint=%di", MaxInt))
+}
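The maxuint64 and maxuint assertions above rely on unsigned values larger than MaxInt being capped when serialized. That clamp lives in the unsigned cases of appendField, which this patch does not modify; the following is only a hypothetical sketch of what such a branch amounts to (helper name and structure are assumptions, not code from this repository):

// appendUintField mirrors what appendField presumably does for uint64/uint:
// values above MaxInt are capped so the field still serializes as a valid
// signed integer field (digits followed by 'i') in line protocol.
func appendUintField(b []byte, v uint64) []byte {
	if v <= uint64(MaxInt) {
		b = strconv.AppendInt(b, int64(v), 10)
	} else {
		b = strconv.AppendInt(b, int64(MaxInt), 10)
	}
	return append(b, 'i')
}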
+
+func TestIndexUnescapedByte(t *testing.T) {
+	tests := []struct {
+		in       []byte
+		b        byte
+		expected int
+	}{
+		{
+			in:       []byte(`foobar`),
+			b:        'b',
+			expected: 3,
+		},
+		{
+			in:       []byte(`foo\bar`),
+			b:        'b',
+			expected: -1,
+		},
+		{
+			in:       []byte(`foo\\bar`),
+			b:        'b',
+			expected: 5,
+		},
+		{
+			in:       []byte(`foobar`),
+			b:        'f',
+			expected: 0,
+		},
+		{
+			in:       []byte(`foobar`),
+			b:        'r',
+			expected: 5,
+		},
+		{
+			in:       []byte(`\foobar`),
+			b:        'f',
+			expected: -1,
+		},
+	}
+
+	for _, test := range tests {
+		got := indexUnescapedByte(test.in, test.b)
+		assert.Equal(t, test.expected, got)
+	}
+}
+
 func TestNewGaugeMetric(t *testing.T) {
 	now := time.Now()
 
@@ -77,6 +434,42 @@ func TestNewCounterMetric(t *testing.T) {
 	assert.Equal(t, now.UnixNano(), m.UnixNano())
 }
 
+func TestNewMetricAggregate(t *testing.T) {
+	now := time.Now()
+
+	tags := map[string]string{
+		"host": "localhost",
+	}
+	fields := map[string]interface{}{
+		"usage_idle": float64(99),
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+
+	assert.False(t, m.IsAggregate())
+	m.SetAggregate(true)
+	assert.True(t, m.IsAggregate())
+}
+
+func TestNewMetricPoint(t *testing.T) {
+	now := time.Now()
+
+	tags := map[string]string{
+		"host": "localhost",
+	}
+	fields := map[string]interface{}{
+		"usage_idle": float64(99),
+	}
+	m, err := New("cpu", tags, fields, now)
+	assert.NoError(t, err)
+
+	p := m.Point()
+
+	assert.Equal(t, fields, m.Fields())
+	assert.Equal(t, fields, p.Fields())
+	assert.Equal(t, "cpu", p.Name())
+}
+
 func TestNewMetricString(t *testing.T) {
 	now := time.Now()
 
diff --git a/metric/parse.go b/metric/parse.go
index 0b105024..fe2cffdc 100644
--- a/metric/parse.go
+++ b/metric/parse.go
@@ -44,6 +44,9 @@ func Parse(buf []byte) ([]telegraf.Metric, error) {
 }
 
 func ParseWithDefaultTime(buf []byte, t time.Time) ([]telegraf.Metric, error) {
+	if len(buf) <= 6 {
+		return []telegraf.Metric{}, makeError("buffer too short", buf, 0)
+	}
 	metrics := make([]telegraf.Metric, 0, bytes.Count(buf, []byte("\n"))+1)
 	var errStr string
 	i := 0
@@ -169,14 +172,14 @@ func scanMeasurement(buf []byte, i int) (int, int, error) {
 	// It can't be a space, since whitespace is stripped prior to this
 	// function call.
 	if i >= len(buf) || buf[i] == ',' {
-		return -1, i, fmt.Errorf("missing measurement")
+		return -1, i, makeError("missing measurement", buf, i)
 	}
 
 	for {
 		i++
 		if i >= len(buf) {
 			// cpu
-			return -1, i, fmt.Errorf("missing fields")
+			return -1, i, makeError("missing fields", buf, i)
 		}
 
 		if buf[i-1] == '\\' {
@@ -228,7 +231,7 @@ func scanTagsKey(buf []byte, i int) (int, error) {
 	// First character of the key.
 	if i >= len(buf) || buf[i] == ' ' || buf[i] == ',' || buf[i] == '=' {
 		// cpu,{'', ' ', ',', '='}
-		return i, fmt.Errorf("missing tag key")
+		return i, makeError("missing tag key", buf, i)
 	}
 
 	// Examine each character in the tag key until we hit an unescaped
@@ -242,7 +245,7 @@ func scanTagsKey(buf []byte, i int) (int, error) {
 		if i >= len(buf) ||
 			((buf[i] == ' ' || buf[i] == ',') && buf[i-1] != '\\') {
 			// cpu,tag{'', ' ', ','}
-			return i, fmt.Errorf("missing tag value")
+			return i, makeError("missing tag value", buf, i)
 		}
 
 		if buf[i] == '=' && buf[i-1] != '\\' {
@@ -257,7 +260,7 @@ func scanTagsValue(buf []byte, i int) (int, int, error) {
 	// Tag value cannot be empty.
 	if i >= len(buf) || buf[i] == ',' || buf[i] == ' ' {
 		// cpu,tag={',', ' '}
-		return -1, i, fmt.Errorf("missing tag value")
+		return -1, i, makeError("missing tag value", buf, i)
 	}
 
 	// Examine each character in the tag value until we hit an unescaped
@@ -267,13 +270,13 @@ func scanTagsValue(buf []byte, i int) (int, int, error) {
 		i++
 		if i >= len(buf) {
 			// cpu,tag=value
-			return -1, i, fmt.Errorf("missing fields")
+			return -1, i, makeError("missing fields", buf, i)
 		}
 
 		// An unescaped equals sign is an invalid tag value.
 		if buf[i] == '=' && buf[i-1] != '\\' {
 			// cpu,tag={'=', 'fo=o'}
-			return -1, i, fmt.Errorf("invalid tag format")
+			return -1, i, makeError("invalid tag format", buf, i)
 		}
 
 		if buf[i] == ',' && buf[i-1] != '\\' {
@@ -329,22 +332,22 @@ func scanFields(buf []byte, i int) (int, []byte, error) {
 
 			// check for "... =123" but allow "a\ =123"
 			if buf[i-1] == ' ' && buf[i-2] != '\\' {
-				return i, buf[start:i], fmt.Errorf("missing field key")
+				return i, buf[start:i], makeError("missing field key", buf, i)
 			}
 
 			// check for "...a=123,=456" but allow "a=123,a\,=456"
 			if buf[i-1] == ',' && buf[i-2] != '\\' {
-				return i, buf[start:i], fmt.Errorf("missing field key")
+				return i, buf[start:i], makeError("missing field key", buf, i)
 			}
 
 			// check for "... value="
 			if i+1 >= len(buf) {
-				return i, buf[start:i], fmt.Errorf("missing field value")
+				return i, buf[start:i], makeError("missing field value", buf, i)
 			}
 
 			// check for "... value=,value2=..."
 			if buf[i+1] == ',' || buf[i+1] == ' ' {
-				return i, buf[start:i], fmt.Errorf("missing field value")
+				return i, buf[start:i], makeError("missing field value", buf, i)
 			}
 
 			if isNumeric(buf[i+1]) || buf[i+1] == '-' || buf[i+1] == 'N' || buf[i+1] == 'n' {
@@ -378,12 +381,12 @@ func scanFields(buf []byte, i int) (int, []byte, error) {
 	}
 
 	if quoted {
-		return i, buf[start:i], fmt.Errorf("unbalanced quotes")
+		return i, buf[start:i], makeError("unbalanced quotes", buf, i)
 	}
 
 	// check that all field sections had key and values (e.g. prevent "a=1,b"
 	if equals == 0 || commas != equals-1 {
-		return i, buf[start:i], fmt.Errorf("invalid field format")
+		return i, buf[start:i], makeError("invalid field format", buf, i)
 	}
 
 	return i, buf[start:i], nil
@@ -416,7 +419,7 @@ func scanTime(buf []byte, i int) (int, []byte, error) {
 		// Timestamps should be integers, make sure they are so we don't need
 		// to actually  parse the timestamp until needed.
 		if buf[i] < '0' || buf[i] > '9' {
-			return i, buf[start:i], fmt.Errorf("bad timestamp")
+			return i, buf[start:i], makeError("invalid timestamp", buf, i)
 		}
 		i++
 	}
@@ -528,14 +531,14 @@ func scanNumber(buf []byte, i int) (int, error) {
 		// We subtract 1 from the index to remove the `i` from our tests
 		if len(buf[start:i-1]) >= maxInt64Digits || len(buf[start:i-1]) >= minInt64Digits {
 			if _, err := parseIntBytes(buf[start:i-1], 10, 64); err != nil {
-				return i, fmt.Errorf("unable to parse integer %s: %s", buf[start:i-1], err)
+				return i, makeError(fmt.Sprintf("unable to parse integer %s: %s", buf[start:i-1], err), buf, i)
 			}
 		}
 	} else {
 		// Parse the float to check bounds if it's scientific or the number of digits could be larger than the max range
 		if scientific || len(buf[start:i]) >= maxFloat64Digits || len(buf[start:i]) >= minFloat64Digits {
 			if _, err := parseFloatBytes(buf[start:i], 10); err != nil {
-				return i, fmt.Errorf("invalid float")
+				return i, makeError("invalid float", buf, i)
 			}
 		}
 	}
@@ -551,7 +554,7 @@ func scanBoolean(buf []byte, i int) (int, []byte, error) {
 	start := i
 
 	if i < len(buf) && (buf[i] != 't' && buf[i] != 'f' && buf[i] != 'T' && buf[i] != 'F') {
-		return i, buf[start:i], fmt.Errorf("invalid boolean")
+		return i, buf[start:i], makeError("invalid value", buf, i)
 	}
 
 	i++
@@ -573,12 +576,12 @@ func scanBoolean(buf []byte, i int) (int, []byte, error) {
 
 	// length must be 4 for true or TRUE
 	if (buf[start] == 't' || buf[start] == 'T') && i-start != 4 {
-		return i, buf[start:i], fmt.Errorf("invalid boolean")
+		return i, buf[start:i], makeError("invalid boolean", buf, i)
 	}
 
 	// length must be 5 for false or FALSE
 	if (buf[start] == 'f' || buf[start] == 'F') && i-start != 5 {
-		return i, buf[start:i], fmt.Errorf("invalid boolean")
+		return i, buf[start:i], makeError("invalid boolean", buf, i)
 	}
 
 	// Otherwise
@@ -595,7 +598,7 @@ func scanBoolean(buf []byte, i int) (int, []byte, error) {
 	}
 
 	if !valid {
-		return i, buf[start:i], fmt.Errorf("invalid boolean")
+		return i, buf[start:i], makeError("invalid boolean", buf, i)
 	}
 
 	return i, buf[start:i], nil
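All of the scan* failures above now go through makeError, which is defined elsewhere in parse.go and not shown in this diff; the call sites imply the signature makeError(reason string, buf []byte, i int) error. A hypothetical sketch of such a helper, purely to illustrate the intent (the real implementation may format the message differently):

func makeError(reason string, buf []byte, i int) error {
	return fmt.Errorf("metric parse error: reason: %q, buffer: %q, index: %d",
		reason, buf, i)
}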
diff --git a/metric/parse_test.go b/metric/parse_test.go
new file mode 100644
index 00000000..8b7a8ff4
--- /dev/null
+++ b/metric/parse_test.go
@@ -0,0 +1,355 @@
+package metric
+
+import (
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+)
+
+const trues = `booltest b=T
+booltest b=t
+booltest b=True
+booltest b=TRUE
+booltest b=true
+`
+
+const falses = `booltest b=F
+booltest b=f
+booltest b=False
+booltest b=FALSE
+booltest b=false
+`
+
+const withEscapes = `w\,\ eather,host=local temp=99 1465839830100400200
+w\,eather,host=local temp=99 1465839830100400200
+weather,location=us\,midwest temperature=82 1465839830100400200
+weather,location=us-midwest temp\=rature=82 1465839830100400200
+weather,location\ place=us-midwest temperature=82 1465839830100400200
+weather,location=us-midwest temperature="too\"hot\"" 1465839830100400200
+`
+
+const withTimestamps = `cpu usage=99 1480595849000000000
+cpu usage=99 1480595850000000000
+cpu usage=99 1480595851700030000
+cpu usage=99 1480595852000000300
+`
+
+const sevenMetrics = `cpu,host=foo,datacenter=us-east idle=99,busy=1i,b=true,s="string"
+cpu,host=foo,datacenter=us-east idle=99,busy=1i,b=true,s="string"
+cpu,host=foo,datacenter=us-east idle=99,busy=1i,b=true,s="string"
+cpu,host=foo,datacenter=us-east idle=99,busy=1i,b=true,s="string"
+cpu,host=foo,datacenter=us-east idle=99,busy=1i,b=true,s="string"
+cpu,host=foo,datacenter=us-east idle=99,busy=1i,b=true,s="string"
+cpu,host=foo,datacenter=us-east idle=99,busy=1i,b=true,s="string"
+`
+
+// some metrics are invalid
+const someInvalid = `cpu,host=foo,datacenter=us-east usage_idle=99,usage_busy=1
+cpu,host=foo,datacenter=us-east usage_idle=99,usage_busy=1
+cpu,host=foo,datacenter=us-east usage_idle=99,usage_busy=1
+cpu,cpu=cpu3, host=foo,datacenter=us-east usage_idle=99,usage_busy=1
+cpu,cpu=cpu4 , usage_idle=99,usage_busy=1
+cpu 1480595852000000300
+cpu usage=99 1480595852foobar300
+cpu,host=foo,datacenter=us-east usage_idle=99,usage_busy=1
+`
+
+func TestParse(t *testing.T) {
+	start := time.Now()
+	metrics, err := Parse([]byte(sevenMetrics))
+	assert.NoError(t, err)
+	assert.Len(t, metrics, 7)
+
+	// all metrics parsed together w/o a timestamp should have the same time.
+	firstTime := metrics[0].Time()
+	for _, m := range metrics {
+		assert.Equal(t,
+			map[string]interface{}{
+				"idle": float64(99),
+				"busy": int64(1),
+				"b":    true,
+				"s":    "string",
+			},
+			m.Fields(),
+		)
+		assert.Equal(t,
+			map[string]string{
+				"host":       "foo",
+				"datacenter": "us-east",
+			},
+			m.Tags(),
+		)
+		assert.True(t, m.Time().After(start))
+		assert.True(t, m.Time().Equal(firstTime))
+	}
+}
+
+func TestParseErrors(t *testing.T) {
+	start := time.Now()
+	metrics, err := Parse([]byte(someInvalid))
+	assert.Error(t, err)
+	assert.Len(t, metrics, 4)
+
+	// all metrics parsed together w/o a timestamp should have the same time.
+	firstTime := metrics[0].Time()
+	for _, m := range metrics {
+		assert.Equal(t,
+			map[string]interface{}{
+				"usage_idle": float64(99),
+				"usage_busy": float64(1),
+			},
+			m.Fields(),
+		)
+		assert.Equal(t,
+			map[string]string{
+				"host":       "foo",
+				"datacenter": "us-east",
+			},
+			m.Tags(),
+		)
+		assert.True(t, m.Time().After(start))
+		assert.True(t, m.Time().Equal(firstTime))
+	}
+}
+
+func TestParseWithTimestamps(t *testing.T) {
+	metrics, err := Parse([]byte(withTimestamps))
+	assert.NoError(t, err)
+	assert.Len(t, metrics, 4)
+
+	expectedTimestamps := []time.Time{
+		time.Unix(0, 1480595849000000000),
+		time.Unix(0, 1480595850000000000),
+		time.Unix(0, 1480595851700030000),
+		time.Unix(0, 1480595852000000300),
+	}
+
+	// each metric carries its own timestamp and should parse to the matching expected time.
+	for i, m := range metrics {
+		assert.Equal(t,
+			map[string]interface{}{
+				"usage": float64(99),
+			},
+			m.Fields(),
+		)
+		assert.True(t, m.Time().Equal(expectedTimestamps[i]))
+	}
+}
+
+func TestParseEscapes(t *testing.T) {
+	metrics, err := Parse([]byte(withEscapes))
+	assert.NoError(t, err)
+	assert.Len(t, metrics, 6)
+
+	tests := []struct {
+		name   string
+		fields map[string]interface{}
+		tags   map[string]string
+	}{
+		{
+			name:   `w, eather`,
+			fields: map[string]interface{}{"temp": float64(99)},
+			tags:   map[string]string{"host": "local"},
+		},
+		{
+			name:   `w,eather`,
+			fields: map[string]interface{}{"temp": float64(99)},
+			tags:   map[string]string{"host": "local"},
+		},
+		{
+			name:   `weather`,
+			fields: map[string]interface{}{"temperature": float64(82)},
+			tags:   map[string]string{"location": `us,midwest`},
+		},
+		{
+			name:   `weather`,
+			fields: map[string]interface{}{`temp=rature`: float64(82)},
+			tags:   map[string]string{"location": `us-midwest`},
+		},
+		{
+			name:   `weather`,
+			fields: map[string]interface{}{"temperature": float64(82)},
+			tags:   map[string]string{`location place`: `us-midwest`},
+		},
+		{
+			name:   `weather`,
+			fields: map[string]interface{}{`temperature`: `too"hot"`},
+			tags:   map[string]string{"location": `us-midwest`},
+		},
+	}
+
+	for i, test := range tests {
+		assert.Equal(t, test.name, metrics[i].Name())
+		assert.Equal(t, test.fields, metrics[i].Fields())
+		assert.Equal(t, test.tags, metrics[i].Tags())
+	}
+}
+
+func TestParseTrueBooleans(t *testing.T) {
+	metrics, err := Parse([]byte(trues))
+	assert.NoError(t, err)
+	assert.Len(t, metrics, 5)
+
+	for _, metric := range metrics {
+		assert.Equal(t, "booltest", metric.Name())
+		assert.Equal(t, true, metric.Fields()["b"])
+	}
+}
+
+func TestParseFalseBooleans(t *testing.T) {
+	metrics, err := Parse([]byte(falses))
+	assert.NoError(t, err)
+	assert.Len(t, metrics, 5)
+
+	for _, metric := range metrics {
+		assert.Equal(t, "booltest", metric.Name())
+		assert.Equal(t, false, metric.Fields()["b"])
+	}
+}
+
+func TestParsePointBadNumber(t *testing.T) {
+	for _, tt := range []string{
+		"cpu v=- ",
+		"cpu v=-i ",
+		"cpu v=-. ",
+		"cpu v=. ",
+		"cpu v=1.0i ",
+		"cpu v=1ii ",
+		"cpu v=1a ",
+		"cpu v=-e-e-e ",
+		"cpu v=42+3 ",
+		"cpu v= ",
+	} {
+		_, err := Parse([]byte(tt + "\n"))
+		assert.Error(t, err, tt)
+	}
+}
+
+func TestParseTagsMissingParts(t *testing.T) {
+	for _, tt := range []string{
+		`cpu,host`,
+		`cpu,host,`,
+		`cpu,host=`,
+		`cpu,f=oo=bar value=1`,
+		`cpu,host value=1i`,
+		`cpu,host=serverA,region value=1i`,
+		`cpu,host=serverA,region= value=1i`,
+		`cpu,host=serverA,region=,zone=us-west value=1i`,
+		`cpu, value=1`,
+		`cpu, ,,`,
+		`cpu,,,`,
+		`cpu,host=serverA,=us-east value=1i`,
+		`cpu,host=serverAa\,,=us-east value=1i`,
+		`cpu,host=serverA\,,=us-east value=1i`,
+		`cpu, =serverA value=1i`,
+	} {
+		_, err := Parse([]byte(tt + "\n"))
+		assert.Error(t, err, tt)
+	}
+}
+
+func TestParsePointWhitespace(t *testing.T) {
+	for _, tt := range []string{
+		`cpu    value=1.0 1257894000000000000`,
+		`cpu value=1.0     1257894000000000000`,
+		`cpu      value=1.0     1257894000000000000`,
+		`cpu value=1.0 1257894000000000000   `,
+	} {
+		m, err := Parse([]byte(tt + "\n"))
+		assert.NoError(t, err, tt)
+		assert.Equal(t, "cpu", m[0].Name())
+		assert.Equal(t, map[string]interface{}{"value": float64(1)}, m[0].Fields())
+	}
+}
+
+func TestParsePointInvalidFields(t *testing.T) {
+	for _, tt := range []string{
+		"test,foo=bar a=101,=value",
+		"test,foo=bar =value",
+		"test,foo=bar a=101,key=",
+		"test,foo=bar key=",
+		`test,foo=bar a=101,b="foo`,
+	} {
+		_, err := Parse([]byte(tt + "\n"))
+		assert.Error(t, err, tt)
+	}
+}
+
+func TestParsePointNoFields(t *testing.T) {
+	for _, tt := range []string{
+		"cpu_load_short,host=server01,region=us-west",
+		"very_long_measurement_name",
+		"cpu,host==",
+		"============",
+		"cpu",
+		"cpu\n\n\n\n\n\n\n",
+		"                ",
+	} {
+		_, err := Parse([]byte(tt + "\n"))
+		assert.Error(t, err, tt)
+	}
+}
+
+// a b=1 << this is the shortest possible metric
+// anything shorter is rejected with an error
+func TestParseBufTooShort(t *testing.T) {
+	for _, tt := range []string{
+		"",
+		"a",
+		"a ",
+		"a b=",
+	} {
+		_, err := Parse([]byte(tt + "\n"))
+		assert.Error(t, err, tt)
+	}
+}
+
+func TestParseInvalidBooleans(t *testing.T) {
+	for _, tt := range []string{
+		"test b=tru",
+		"test b=fals",
+		"test b=faLse",
+		"test q=foo",
+		"test b=lambchops",
+	} {
+		_, err := Parse([]byte(tt + "\n"))
+		assert.Error(t, err, tt)
+	}
+}
+
+func TestParseInvalidNumbers(t *testing.T) {
+	for _, tt := range []string{
+		"test b=-",
+		"test b=1.1.1",
+		"test b=nan",
+		"test b=9i10",
+		"test b=9999999999999999999i",
+	} {
+		_, err := Parse([]byte(tt + "\n"))
+		assert.Error(t, err, tt)
+	}
+}
+
+func TestParseNegativeTimestamps(t *testing.T) {
+	for _, tt := range []string{
+		"test foo=101 -1257894000000000000",
+	} {
+		metrics, err := Parse([]byte(tt + "\n"))
+		assert.NoError(t, err, tt)
+		assert.True(t, metrics[0].Time().Equal(time.Unix(0, -1257894000000000000)))
+	}
+}
+
+func TestParseMaxKeyLength(t *testing.T) {
+	key := ""
+	for {
+		if len(key) > MaxKeyLength {
+			break
+		}
+		key += "test"
+	}
+
+	_, err := Parse([]byte(key + " value=1\n"))
+	assert.Error(t, err)
+}
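For reference, a minimal sketch of driving the parser the way these tests do. Parse stamps lines that omit a timestamp with the current time, while ParseWithDefaultTime takes an explicit fallback; the output comments reflect the unescaping exercised in TestParseEscapes above (import path as used in this repository):

package main

import (
	"fmt"
	"log"
	"time"

	"github.com/influxdata/telegraf/metric"
)

func main() {
	in := []byte(`weather,location=us\,midwest temperature=82 1465839830100400200` + "\n")
	metrics, err := metric.Parse(in)
	if err != nil {
		log.Fatal(err)
	}
	m := metrics[0]
	fmt.Println(m.Name())   // weather
	fmt.Println(m.Tags())   // map[location:us,midwest]
	fmt.Println(m.Fields()) // map[temperature:82]

	// lines with no trailing timestamp get the supplied default time instead
	if _, err := metric.ParseWithDefaultTime([]byte("cpu usage=99\n"), time.Now()); err != nil {
		log.Fatal(err)
	}
}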
diff --git a/plugins/serializers/json/json_test.go b/plugins/serializers/json/json_test.go
index 9370e4da..e44952b6 100644
--- a/plugins/serializers/json/json_test.go
+++ b/plugins/serializers/json/json_test.go
@@ -2,7 +2,6 @@ package json
 
 import (
 	"fmt"
-	"strings"
 	"testing"
 	"time"
 
@@ -25,10 +24,9 @@ func TestSerializeMetricFloat(t *testing.T) {
 	s := JsonSerializer{}
 	var buf []byte
 	buf, err = s.Serialize(m)
-	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 	assert.NoError(t, err)
-	expS := []string{fmt.Sprintf("{\"fields\":{\"usage_idle\":91.5},\"name\":\"cpu\",\"tags\":{\"cpu\":\"cpu0\"},\"timestamp\":%d}", now.Unix())}
-	assert.Equal(t, expS, mS)
+	expS := []byte(fmt.Sprintf(`{"fields":{"usage_idle":91.5},"name":"cpu","tags":{"cpu":"cpu0"},"timestamp":%d}`, now.Unix()) + "\n")
+	assert.Equal(t, string(expS), string(buf))
 }
 
 func TestSerializeMetricInt(t *testing.T) {
@@ -45,11 +43,10 @@ func TestSerializeMetricInt(t *testing.T) {
 	s := JsonSerializer{}
 	var buf []byte
 	buf, err = s.Serialize(m)
-	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 	assert.NoError(t, err)
 
-	expS := []string{fmt.Sprintf("{\"fields\":{\"usage_idle\":90},\"name\":\"cpu\",\"tags\":{\"cpu\":\"cpu0\"},\"timestamp\":%d}", now.Unix())}
-	assert.Equal(t, expS, mS)
+	expS := []byte(fmt.Sprintf(`{"fields":{"usage_idle":90},"name":"cpu","tags":{"cpu":"cpu0"},"timestamp":%d}`, now.Unix()) + "\n")
+	assert.Equal(t, string(expS), string(buf))
 }
 
 func TestSerializeMetricString(t *testing.T) {
@@ -66,11 +63,10 @@ func TestSerializeMetricString(t *testing.T) {
 	s := JsonSerializer{}
 	var buf []byte
 	buf, err = s.Serialize(m)
-	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 	assert.NoError(t, err)
 
-	expS := []string{fmt.Sprintf("{\"fields\":{\"usage_idle\":\"foobar\"},\"name\":\"cpu\",\"tags\":{\"cpu\":\"cpu0\"},\"timestamp\":%d}", now.Unix())}
-	assert.Equal(t, expS, mS)
+	expS := []byte(fmt.Sprintf(`{"fields":{"usage_idle":"foobar"},"name":"cpu","tags":{"cpu":"cpu0"},"timestamp":%d}`, now.Unix()) + "\n")
+	assert.Equal(t, string(expS), string(buf))
 }
 
 func TestSerializeMultiFields(t *testing.T) {
@@ -88,11 +84,10 @@ func TestSerializeMultiFields(t *testing.T) {
 	s := JsonSerializer{}
 	var buf []byte
 	buf, err = s.Serialize(m)
-	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 	assert.NoError(t, err)
 
-	expS := []string{fmt.Sprintf("{\"fields\":{\"usage_idle\":90,\"usage_total\":8559615},\"name\":\"cpu\",\"tags\":{\"cpu\":\"cpu0\"},\"timestamp\":%d}", now.Unix())}
-	assert.Equal(t, expS, mS)
+	expS := []byte(fmt.Sprintf(`{"fields":{"usage_idle":90,"usage_total":8559615},"name":"cpu","tags":{"cpu":"cpu0"},"timestamp":%d}`, now.Unix()) + "\n")
+	assert.Equal(t, string(expS), string(buf))
 }
 
 func TestSerializeMetricWithEscapes(t *testing.T) {
@@ -103,13 +98,13 @@ func TestSerializeMetricWithEscapes(t *testing.T) {
 	fields := map[string]interface{}{
 		"U,age=Idle": int64(90),
 	}
-	m, err := telegraf.NewMetric("My CPU", tags, fields, now)
+	m, err := metric.New("My CPU", tags, fields, now)
 	assert.NoError(t, err)
 
 	s := JsonSerializer{}
-	mS, err := s.Serialize(m)
+	buf, err := s.Serialize(m)
 	assert.NoError(t, err)
 
-	expS := []string{fmt.Sprintf(`{"fields":{"U,age=Idle":90},"name":"My CPU","tags":{"cpu tag":"cpu0"},"timestamp":%d}`, now.Unix())}
-	assert.Equal(t, expS, mS)
+	expS := []byte(fmt.Sprintf(`{"fields":{"U,age=Idle":90},"name":"My CPU","tags":{"cpu tag":"cpu0"},"timestamp":%d}`, now.Unix()) + "\n")
+	assert.Equal(t, string(expS), string(buf))
 }
-- 
GitLab