Reset gojson to golang/go|1.23.4 [removes all custom changes]

This commit is contained in:
Mike Schwörer 2025-01-10 11:49:29 +01:00
parent b7c48cb467
commit c8e9c34706
Signed by: Mikescher
GPG Key ID: D3C7172E0A70F8CF
18 changed files with 2603 additions and 2512 deletions

View File

@ -1,27 +0,0 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

584
gojson/bench_test.go Normal file
View File

@ -0,0 +1,584 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Large data benchmark.
// The JSON data is a summary of agl's changes in the
// go, webkit, and chromium open source projects.
// We benchmark converting between the JSON form
// and in-memory data structures.
package json
import (
"bytes"
"compress/gzip"
"fmt"
"internal/testenv"
"io"
"os"
"reflect"
"regexp"
"runtime"
"strings"
"sync"
"testing"
)
type codeResponse struct {
Tree *codeNode `json:"tree"`
Username string `json:"username"`
}
type codeNode struct {
Name string `json:"name"`
Kids []*codeNode `json:"kids"`
CLWeight float64 `json:"cl_weight"`
Touches int `json:"touches"`
MinT int64 `json:"min_t"`
MaxT int64 `json:"max_t"`
MeanT int64 `json:"mean_t"`
}
var codeJSON []byte
var codeStruct codeResponse
func codeInit() {
f, err := os.Open("testdata/code.json.gz")
if err != nil {
panic(err)
}
defer f.Close()
gz, err := gzip.NewReader(f)
if err != nil {
panic(err)
}
data, err := io.ReadAll(gz)
if err != nil {
panic(err)
}
codeJSON = data
if err := Unmarshal(codeJSON, &codeStruct); err != nil {
panic("unmarshal code.json: " + err.Error())
}
if data, err = Marshal(&codeStruct); err != nil {
panic("marshal code.json: " + err.Error())
}
if !bytes.Equal(data, codeJSON) {
println("different lengths", len(data), len(codeJSON))
for i := 0; i < len(data) && i < len(codeJSON); i++ {
if data[i] != codeJSON[i] {
println("re-marshal: changed at byte", i)
println("orig: ", string(codeJSON[i-10:i+10]))
println("new: ", string(data[i-10:i+10]))
break
}
}
panic("re-marshal code.json: different result")
}
}
func BenchmarkCodeEncoder(b *testing.B) {
b.ReportAllocs()
if codeJSON == nil {
b.StopTimer()
codeInit()
b.StartTimer()
}
b.RunParallel(func(pb *testing.PB) {
enc := NewEncoder(io.Discard)
for pb.Next() {
if err := enc.Encode(&codeStruct); err != nil {
b.Fatalf("Encode error: %v", err)
}
}
})
b.SetBytes(int64(len(codeJSON)))
}
func BenchmarkCodeEncoderError(b *testing.B) {
b.ReportAllocs()
if codeJSON == nil {
b.StopTimer()
codeInit()
b.StartTimer()
}
// Trigger an error in Marshal with cyclic data.
type Dummy struct {
Name string
Next *Dummy
}
dummy := Dummy{Name: "Dummy"}
dummy.Next = &dummy
b.RunParallel(func(pb *testing.PB) {
enc := NewEncoder(io.Discard)
for pb.Next() {
if err := enc.Encode(&codeStruct); err != nil {
b.Fatalf("Encode error: %v", err)
}
if _, err := Marshal(dummy); err == nil {
b.Fatal("Marshal error: got nil, want non-nil")
}
}
})
b.SetBytes(int64(len(codeJSON)))
}
func BenchmarkCodeMarshal(b *testing.B) {
b.ReportAllocs()
if codeJSON == nil {
b.StopTimer()
codeInit()
b.StartTimer()
}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := Marshal(&codeStruct); err != nil {
b.Fatalf("Marshal error: %v", err)
}
}
})
b.SetBytes(int64(len(codeJSON)))
}
func BenchmarkCodeMarshalError(b *testing.B) {
b.ReportAllocs()
if codeJSON == nil {
b.StopTimer()
codeInit()
b.StartTimer()
}
// Trigger an error in Marshal with cyclic data.
type Dummy struct {
Name string
Next *Dummy
}
dummy := Dummy{Name: "Dummy"}
dummy.Next = &dummy
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := Marshal(&codeStruct); err != nil {
b.Fatalf("Marshal error: %v", err)
}
if _, err := Marshal(dummy); err == nil {
b.Fatal("Marshal error: got nil, want non-nil")
}
}
})
b.SetBytes(int64(len(codeJSON)))
}
func benchMarshalBytes(n int) func(*testing.B) {
sample := []byte("hello world")
// Use a struct pointer, to avoid an allocation when passing it as an
// interface parameter to Marshal.
v := &struct {
Bytes []byte
}{
bytes.Repeat(sample, (n/len(sample))+1)[:n],
}
return func(b *testing.B) {
for i := 0; i < b.N; i++ {
if _, err := Marshal(v); err != nil {
b.Fatalf("Marshal error: %v", err)
}
}
}
}
func benchMarshalBytesError(n int) func(*testing.B) {
sample := []byte("hello world")
// Use a struct pointer, to avoid an allocation when passing it as an
// interface parameter to Marshal.
v := &struct {
Bytes []byte
}{
bytes.Repeat(sample, (n/len(sample))+1)[:n],
}
// Trigger an error in Marshal with cyclic data.
type Dummy struct {
Name string
Next *Dummy
}
dummy := Dummy{Name: "Dummy"}
dummy.Next = &dummy
return func(b *testing.B) {
for i := 0; i < b.N; i++ {
if _, err := Marshal(v); err != nil {
b.Fatalf("Marshal error: %v", err)
}
if _, err := Marshal(dummy); err == nil {
b.Fatal("Marshal error: got nil, want non-nil")
}
}
}
}
func BenchmarkMarshalBytes(b *testing.B) {
b.ReportAllocs()
// 32 fits within encodeState.scratch.
b.Run("32", benchMarshalBytes(32))
// 256 doesn't fit in encodeState.scratch, but is small enough to
// allocate and avoid the slower base64.NewEncoder.
b.Run("256", benchMarshalBytes(256))
// 4096 is large enough that we want to avoid allocating for it.
b.Run("4096", benchMarshalBytes(4096))
}
func BenchmarkMarshalBytesError(b *testing.B) {
b.ReportAllocs()
// 32 fits within encodeState.scratch.
b.Run("32", benchMarshalBytesError(32))
// 256 doesn't fit in encodeState.scratch, but is small enough to
// allocate and avoid the slower base64.NewEncoder.
b.Run("256", benchMarshalBytesError(256))
// 4096 is large enough that we want to avoid allocating for it.
b.Run("4096", benchMarshalBytesError(4096))
}
func BenchmarkMarshalMap(b *testing.B) {
b.ReportAllocs()
m := map[string]int{
"key3": 3,
"key2": 2,
"key1": 1,
}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := Marshal(m); err != nil {
b.Fatal("Marshal:", err)
}
}
})
}
func BenchmarkCodeDecoder(b *testing.B) {
b.ReportAllocs()
if codeJSON == nil {
b.StopTimer()
codeInit()
b.StartTimer()
}
b.RunParallel(func(pb *testing.PB) {
var buf bytes.Buffer
dec := NewDecoder(&buf)
var r codeResponse
for pb.Next() {
buf.Write(codeJSON)
// hide EOF
buf.WriteByte('\n')
buf.WriteByte('\n')
buf.WriteByte('\n')
if err := dec.Decode(&r); err != nil {
b.Fatalf("Decode error: %v", err)
}
}
})
b.SetBytes(int64(len(codeJSON)))
}
func BenchmarkUnicodeDecoder(b *testing.B) {
b.ReportAllocs()
j := []byte(`"\uD83D\uDE01"`)
b.SetBytes(int64(len(j)))
r := bytes.NewReader(j)
dec := NewDecoder(r)
var out string
b.ResetTimer()
for i := 0; i < b.N; i++ {
if err := dec.Decode(&out); err != nil {
b.Fatalf("Decode error: %v", err)
}
r.Seek(0, 0)
}
}
func BenchmarkDecoderStream(b *testing.B) {
b.ReportAllocs()
b.StopTimer()
var buf bytes.Buffer
dec := NewDecoder(&buf)
buf.WriteString(`"` + strings.Repeat("x", 1000000) + `"` + "\n\n\n")
var x any
if err := dec.Decode(&x); err != nil {
b.Fatalf("Decode error: %v", err)
}
ones := strings.Repeat(" 1\n", 300000) + "\n\n\n"
b.StartTimer()
for i := 0; i < b.N; i++ {
if i%300000 == 0 {
buf.WriteString(ones)
}
x = nil
switch err := dec.Decode(&x); {
case err != nil:
b.Fatalf("Decode error: %v", err)
case x != 1.0:
b.Fatalf("Decode: got %v want 1.0", i)
}
}
}
func BenchmarkCodeUnmarshal(b *testing.B) {
b.ReportAllocs()
if codeJSON == nil {
b.StopTimer()
codeInit()
b.StartTimer()
}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
var r codeResponse
if err := Unmarshal(codeJSON, &r); err != nil {
b.Fatalf("Unmarshal error: %v", err)
}
}
})
b.SetBytes(int64(len(codeJSON)))
}
func BenchmarkCodeUnmarshalReuse(b *testing.B) {
b.ReportAllocs()
if codeJSON == nil {
b.StopTimer()
codeInit()
b.StartTimer()
}
b.RunParallel(func(pb *testing.PB) {
var r codeResponse
for pb.Next() {
if err := Unmarshal(codeJSON, &r); err != nil {
b.Fatalf("Unmarshal error: %v", err)
}
}
})
b.SetBytes(int64(len(codeJSON)))
}
func BenchmarkUnmarshalString(b *testing.B) {
b.ReportAllocs()
data := []byte(`"hello, world"`)
b.RunParallel(func(pb *testing.PB) {
var s string
for pb.Next() {
if err := Unmarshal(data, &s); err != nil {
b.Fatalf("Unmarshal error: %v", err)
}
}
})
}
func BenchmarkUnmarshalFloat64(b *testing.B) {
b.ReportAllocs()
data := []byte(`3.14`)
b.RunParallel(func(pb *testing.PB) {
var f float64
for pb.Next() {
if err := Unmarshal(data, &f); err != nil {
b.Fatalf("Unmarshal error: %v", err)
}
}
})
}
func BenchmarkUnmarshalInt64(b *testing.B) {
b.ReportAllocs()
data := []byte(`3`)
b.RunParallel(func(pb *testing.PB) {
var x int64
for pb.Next() {
if err := Unmarshal(data, &x); err != nil {
b.Fatalf("Unmarshal error: %v", err)
}
}
})
}
func BenchmarkUnmarshalMap(b *testing.B) {
b.ReportAllocs()
data := []byte(`{"key1":"value1","key2":"value2","key3":"value3"}`)
b.RunParallel(func(pb *testing.PB) {
x := make(map[string]string, 3)
for pb.Next() {
if err := Unmarshal(data, &x); err != nil {
b.Fatalf("Unmarshal error: %v", err)
}
}
})
}
func BenchmarkIssue10335(b *testing.B) {
b.ReportAllocs()
j := []byte(`{"a":{ }}`)
b.RunParallel(func(pb *testing.PB) {
var s struct{}
for pb.Next() {
if err := Unmarshal(j, &s); err != nil {
b.Fatalf("Unmarshal error: %v", err)
}
}
})
}
func BenchmarkIssue34127(b *testing.B) {
b.ReportAllocs()
j := struct {
Bar string `json:"bar,string"`
}{
Bar: `foobar`,
}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := Marshal(&j); err != nil {
b.Fatalf("Marshal error: %v", err)
}
}
})
}
func BenchmarkUnmapped(b *testing.B) {
b.ReportAllocs()
j := []byte(`{"s": "hello", "y": 2, "o": {"x": 0}, "a": [1, 99, {"x": 1}]}`)
b.RunParallel(func(pb *testing.PB) {
var s struct{}
for pb.Next() {
if err := Unmarshal(j, &s); err != nil {
b.Fatalf("Unmarshal error: %v", err)
}
}
})
}
func BenchmarkTypeFieldsCache(b *testing.B) {
b.ReportAllocs()
var maxTypes int = 1e6
if testenv.Builder() != "" {
maxTypes = 1e3 // restrict cache sizes on builders
}
// Dynamically generate many new types.
types := make([]reflect.Type, maxTypes)
fs := []reflect.StructField{{
Type: reflect.TypeFor[string](),
Index: []int{0},
}}
for i := range types {
fs[0].Name = fmt.Sprintf("TypeFieldsCache%d", i)
types[i] = reflect.StructOf(fs)
}
// clearCache clears the cache. Other JSON operations must not be running.
clearCache := func() {
fieldCache = sync.Map{}
}
// MissTypes tests the performance of repeated cache misses.
// This measures the time to rebuild a cache of size nt.
for nt := 1; nt <= maxTypes; nt *= 10 {
ts := types[:nt]
b.Run(fmt.Sprintf("MissTypes%d", nt), func(b *testing.B) {
nc := runtime.GOMAXPROCS(0)
for i := 0; i < b.N; i++ {
clearCache()
var wg sync.WaitGroup
for j := 0; j < nc; j++ {
wg.Add(1)
go func(j int) {
for _, t := range ts[(j*len(ts))/nc : ((j+1)*len(ts))/nc] {
cachedTypeFields(t)
}
wg.Done()
}(j)
}
wg.Wait()
}
})
}
// HitTypes tests the performance of repeated cache hits.
// This measures the average time of each cache lookup.
for nt := 1; nt <= maxTypes; nt *= 10 {
// Pre-warm a cache of size nt.
clearCache()
for _, t := range types[:nt] {
cachedTypeFields(t)
}
b.Run(fmt.Sprintf("HitTypes%d", nt), func(b *testing.B) {
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
cachedTypeFields(types[0])
}
})
})
}
}
func BenchmarkEncodeMarshaler(b *testing.B) {
b.ReportAllocs()
m := struct {
A int
B RawMessage
}{}
b.RunParallel(func(pb *testing.PB) {
enc := NewEncoder(io.Discard)
for pb.Next() {
if err := enc.Encode(&m); err != nil {
b.Fatalf("Encode error: %v", err)
}
}
})
}
func BenchmarkEncoderEncode(b *testing.B) {
b.ReportAllocs()
type T struct {
X, Y string
}
v := &T{"foo", "bar"}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := NewEncoder(io.Discard).Encode(v); err != nil {
b.Fatalf("Encode error: %v", err)
}
}
})
}
func BenchmarkNumberIsValid(b *testing.B) {
s := "-61657.61667E+61673"
for i := 0; i < b.N; i++ {
isValidNumber(s)
}
}
func BenchmarkNumberIsValidRegexp(b *testing.B) {
var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
s := "-61657.61667E+61673"
for i := 0; i < b.N; i++ {
jsonNumberRegexp.MatchString(s)
}
}
func BenchmarkUnmarshalNumber(b *testing.B) {
b.ReportAllocs()
data := []byte(`"-61657.61667E+61673"`)
var number Number
for i := 0; i < b.N; i++ {
if err := Unmarshal(data, &number); err != nil {
b.Fatal("Unmarshal:", err)
}
}
}
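
The SetBytes/RunParallel pattern used throughout this file can also be driven outside go test via testing.Benchmark; a minimal sketch, assuming the standard encoding/json import path and a made-up payload rather than this package:

package main

import (
	"encoding/json"
	"fmt"
	"testing"
)

func main() {
	data := []byte(`{"key1":"value1","key2":"value2","key3":"value3"}`)
	res := testing.Benchmark(func(b *testing.B) {
		b.ReportAllocs()
		b.SetBytes(int64(len(data))) // lets the result report MB/s, as the benchmarks above do
		var m map[string]string
		for i := 0; i < b.N; i++ {
			if err := json.Unmarshal(data, &m); err != nil {
				b.Fatal(err)
			}
		}
	})
	fmt.Println(res.String(), res.MemString())
}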

View File

@ -17,14 +17,15 @@ import (
"unicode"
"unicode/utf16"
"unicode/utf8"
_ "unsafe" // for linkname
)
// Unmarshal parses the JSON-encoded data and stores the result
// in the value pointed to by v. If v is nil or not a pointer,
// Unmarshal returns an InvalidUnmarshalError.
// Unmarshal returns an [InvalidUnmarshalError].
//
// Unmarshal uses the inverse of the encodings that
// Marshal uses, allocating maps, slices, and pointers as necessary,
// [Marshal] uses, allocating maps, slices, and pointers as necessary,
// with the following additional rules:
//
// To unmarshal JSON into a pointer, Unmarshal first handles the case of
@ -33,28 +34,28 @@ import (
// the value pointed at by the pointer. If the pointer is nil, Unmarshal
// allocates a new value for it to point to.
//
// To unmarshal JSON into a value implementing the Unmarshaler interface,
// Unmarshal calls that value's UnmarshalJSON method, including
// To unmarshal JSON into a value implementing [Unmarshaler],
// Unmarshal calls that value's [Unmarshaler.UnmarshalJSON] method, including
// when the input is a JSON null.
// Otherwise, if the value implements encoding.TextUnmarshaler
// and the input is a JSON quoted string, Unmarshal calls that value's
// UnmarshalText method with the unquoted form of the string.
// Otherwise, if the value implements [encoding.TextUnmarshaler]
// and the input is a JSON quoted string, Unmarshal calls
// [encoding.TextUnmarshaler.UnmarshalText] with the unquoted form of the string.
//
// To unmarshal JSON into a struct, Unmarshal matches incoming object
// keys to the keys used by Marshal (either the struct field name or its tag),
// keys to the keys used by [Marshal] (either the struct field name or its tag),
// preferring an exact match but also accepting a case-insensitive match. By
// default, object keys which don't have a corresponding struct field are
// ignored (see Decoder.DisallowUnknownFields for an alternative).
// ignored (see [Decoder.DisallowUnknownFields] for an alternative).
//
// To unmarshal JSON into an interface value,
// Unmarshal stores one of these in the interface value:
//
// bool, for JSON booleans
// float64, for JSON numbers
// string, for JSON strings
// []interface{}, for JSON arrays
// map[string]interface{}, for JSON objects
// nil for JSON null
// - bool, for JSON booleans
// - float64, for JSON numbers
// - string, for JSON strings
// - []interface{}, for JSON arrays
// - map[string]interface{}, for JSON objects
// - nil for JSON null
//
// To unmarshal a JSON array into a slice, Unmarshal resets the slice length
// to zero and then appends each element to the slice.
@ -72,16 +73,15 @@ import (
// use. If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal
// reuses the existing map, keeping existing entries. Unmarshal then stores
// key-value pairs from the JSON object into the map. The map's key type must
// either be any string type, an integer, implement json.Unmarshaler, or
// implement encoding.TextUnmarshaler.
// either be any string type, an integer, or implement [encoding.TextUnmarshaler].
//
// If the JSON-encoded data contain a syntax error, Unmarshal returns a SyntaxError.
// If the JSON-encoded data contain a syntax error, Unmarshal returns a [SyntaxError].
//
// If a JSON value is not appropriate for a given target type,
// or if a JSON number overflows the target type, Unmarshal
// skips that field and completes the unmarshaling as best it can.
// If no more serious errors are encountered, Unmarshal returns
// an UnmarshalTypeError describing the earliest such error. In any
// an [UnmarshalTypeError] describing the earliest such error. In any
// case, it's not guaranteed that all the remaining fields following
// the problematic one will be unmarshaled into the target object.
//
@ -114,7 +114,7 @@ func Unmarshal(data []byte, v any) error {
// a JSON value. UnmarshalJSON must copy the JSON data
// if it wishes to retain the data after returning.
//
// By convention, to approximate the behavior of Unmarshal itself,
// By convention, to approximate the behavior of [Unmarshal] itself,
// Unmarshalers implement UnmarshalJSON([]byte("null")) as a no-op.
type Unmarshaler interface {
UnmarshalJSON([]byte) error
@ -151,8 +151,8 @@ func (e *UnmarshalFieldError) Error() string {
return "json: cannot unmarshal object key " + strconv.Quote(e.Key) + " into unexported field " + e.Field.Name + " of type " + e.Type.String()
}
// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
// (The argument to Unmarshal must be a non-nil pointer.)
// An InvalidUnmarshalError describes an invalid argument passed to [Unmarshal].
// (The argument to [Unmarshal] must be a non-nil pointer.)
type InvalidUnmarshalError struct {
Type reflect.Type
}
@ -217,7 +217,6 @@ type decodeState struct {
savedError error
useNumber bool
disallowUnknownFields bool
tagkey *string
}
// readIndex returns the position of the last byte read.
@ -541,17 +540,10 @@ func (d *decodeState) array(v reflect.Value) error {
break
}
// Get element of array, growing if necessary.
// Expand slice length, growing the slice if necessary.
if v.Kind() == reflect.Slice {
// Grow slice if necessary
if i >= v.Cap() {
newcap := v.Cap() + v.Cap()/2
if newcap < 4 {
newcap = 4
}
newv := reflect.MakeSlice(v.Type(), v.Len(), newcap)
reflect.Copy(newv, v)
v.Set(newv)
v.Grow(1)
}
if i >= v.Len() {
v.SetLen(i + 1)
@ -585,13 +577,11 @@ func (d *decodeState) array(v reflect.Value) error {
if i < v.Len() {
if v.Kind() == reflect.Array {
// Array. Zero the rest.
z := reflect.Zero(v.Type().Elem())
for ; i < v.Len(); i++ {
v.Index(i).Set(z)
v.Index(i).SetZero() // zero remainder of array
}
} else {
v.SetLen(i)
v.SetLen(i) // truncate the slice
}
}
if i == 0 && v.Kind() == reflect.Slice {
@ -601,7 +591,7 @@ func (d *decodeState) array(v reflect.Value) error {
}
var nullLiteral = []byte("null")
var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
var textUnmarshalerType = reflect.TypeFor[encoding.TextUnmarshaler]()
// object consumes an object from d.data[d.off-1:], decoding into v.
// The first byte ('{') of the object has been read already.
@ -653,11 +643,7 @@ func (d *decodeState) object(v reflect.Value) error {
v.Set(reflect.MakeMap(t))
}
case reflect.Struct:
tagkey := "json"
if d.tagkey != nil {
tagkey = *d.tagkey
}
fields = cachedTypeFields(t, tagkey)
fields = cachedTypeFields(t)
// ok
default:
d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)})
@ -700,24 +686,13 @@ func (d *decodeState) object(v reflect.Value) error {
if !mapElem.IsValid() {
mapElem = reflect.New(elemType).Elem()
} else {
mapElem.Set(reflect.Zero(elemType))
mapElem.SetZero()
}
subv = mapElem
} else {
var f *field
if i, ok := fields.nameIndex[string(key)]; ok {
// Found an exact name match.
f = &fields.list[i]
} else {
// Fall back to the expensive case-insensitive
// linear search.
for i := range fields.list {
ff := &fields.list[i]
if ff.equalFold(ff.nameBytes, key) {
f = ff
break
}
}
f := fields.byExactName[string(key)]
if f == nil {
f = fields.byFoldedName[string(foldName(key))]
}
if f != nil {
subv = v
@ -787,33 +762,35 @@ func (d *decodeState) object(v reflect.Value) error {
if v.Kind() == reflect.Map {
kt := t.Key()
var kv reflect.Value
switch {
case reflect.PointerTo(kt).Implements(textUnmarshalerType):
if reflect.PointerTo(kt).Implements(textUnmarshalerType) {
kv = reflect.New(kt)
if err := d.literalStore(item, kv, true); err != nil {
return err
}
kv = kv.Elem()
case kt.Kind() == reflect.String:
kv = reflect.ValueOf(key).Convert(kt)
default:
} else {
switch kt.Kind() {
case reflect.String:
kv = reflect.New(kt).Elem()
kv.SetString(string(key))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
s := string(key)
n, err := strconv.ParseInt(s, 10, 64)
if err != nil || reflect.Zero(kt).OverflowInt(n) {
if err != nil || kt.OverflowInt(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: kt, Offset: int64(start + 1)})
break
}
kv = reflect.ValueOf(n).Convert(kt)
kv = reflect.New(kt).Elem()
kv.SetInt(n)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
s := string(key)
n, err := strconv.ParseUint(s, 10, 64)
if err != nil || reflect.Zero(kt).OverflowUint(n) {
if err != nil || kt.OverflowUint(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: kt, Offset: int64(start + 1)})
break
}
kv = reflect.ValueOf(n).Convert(kt)
kv = reflect.New(kt).Elem()
kv.SetUint(n)
default:
panic("json: Unexpected key type") // should never occur
}
@ -852,12 +829,12 @@ func (d *decodeState) convertNumber(s string) (any, error) {
}
f, err := strconv.ParseFloat(s, 64)
if err != nil {
return nil, &UnmarshalTypeError{Value: "number " + s, Type: reflect.TypeOf(0.0), Offset: int64(d.off)}
return nil, &UnmarshalTypeError{Value: "number " + s, Type: reflect.TypeFor[float64](), Offset: int64(d.off)}
}
return f, nil
}
var numberType = reflect.TypeOf(Number(""))
var numberType = reflect.TypeFor[Number]()
// literalStore decodes a literal stored in item into v.
//
@ -867,7 +844,7 @@ var numberType = reflect.TypeOf(Number(""))
func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool) error {
// Check for unmarshaler.
if len(item) == 0 {
//Empty string given
// Empty string given.
d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
return nil
}
@ -914,7 +891,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
}
switch v.Kind() {
case reflect.Interface, reflect.Pointer, reflect.Map, reflect.Slice:
v.Set(reflect.Zero(v.Type()))
v.SetZero()
// otherwise, ignore null for primitives/string
}
case 't', 'f': // true, false
@ -966,10 +943,11 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
}
v.SetBytes(b[:n])
case reflect.String:
if v.Type() == numberType && !isValidNumber(string(s)) {
t := string(s)
if v.Type() == numberType && !isValidNumber(t) {
return fmt.Errorf("json: invalid number literal, trying to unmarshal %q into Number", item)
}
v.SetString(string(s))
v.SetString(t)
case reflect.Interface:
if v.NumMethod() == 0 {
v.Set(reflect.ValueOf(string(s)))
@ -985,13 +963,12 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
}
panic(phasePanicMsg)
}
s := string(item)
switch v.Kind() {
default:
if v.Kind() == reflect.String && v.Type() == numberType {
// s must be a valid number, because it's
// already been tokenized.
v.SetString(s)
v.SetString(string(item))
break
}
if fromQuoted {
@ -999,7 +976,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
}
d.saveError(&UnmarshalTypeError{Value: "number", Type: v.Type(), Offset: int64(d.readIndex())})
case reflect.Interface:
n, err := d.convertNumber(s)
n, err := d.convertNumber(string(item))
if err != nil {
d.saveError(err)
break
@ -1011,25 +988,25 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
v.Set(reflect.ValueOf(n))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
n, err := strconv.ParseInt(s, 10, 64)
n, err := strconv.ParseInt(string(item), 10, 64)
if err != nil || v.OverflowInt(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())})
d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())})
break
}
v.SetInt(n)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
n, err := strconv.ParseUint(s, 10, 64)
n, err := strconv.ParseUint(string(item), 10, 64)
if err != nil || v.OverflowUint(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())})
d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())})
break
}
v.SetUint(n)
case reflect.Float32, reflect.Float64:
n, err := strconv.ParseFloat(s, v.Type().Bits())
n, err := strconv.ParseFloat(string(item), v.Type().Bits())
if err != nil || v.OverflowFloat(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())})
d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())})
break
}
v.SetFloat(n)
@ -1201,6 +1178,15 @@ func unquote(s []byte) (t string, ok bool) {
return
}
// unquoteBytes should be an internal detail,
// but widely used packages access it using linkname.
// Notable members of the hall of shame include:
// - github.com/bytedance/sonic
//
// Do not remove or change the type signature.
// See go.dev/issue/67401.
//
//go:linkname unquoteBytes
func unquoteBytes(s []byte) (t []byte, ok bool) {
if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
return

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -5,140 +5,44 @@
package json
import (
"bytes"
"unicode"
"unicode/utf8"
)
const (
caseMask = ^byte(0x20) // Mask to ignore case in ASCII.
kelvin = '\u212a'
smallLongEss = '\u017f'
)
// foldFunc returns one of four different case folding equivalence
// functions, from most general (and slow) to fastest:
//
// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8
// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S')
// 3) asciiEqualFold, no special, but includes non-letters (including _)
// 4) simpleLetterEqualFold, no specials, no non-letters.
//
// The letters S and K are special because they map to 3 runes, not just 2:
// - S maps to s and to U+017F 'ſ' Latin small letter long s
// - k maps to K and to U+212A 'K' Kelvin sign
//
// See https://play.golang.org/p/tTxjOc0OGo
//
// The returned function is specialized for matching against s and
// should only be given s. It's not curried for performance reasons.
func foldFunc(s []byte) func(s, t []byte) bool {
nonLetter := false
special := false // special letter
for _, b := range s {
if b >= utf8.RuneSelf {
return bytes.EqualFold
}
upper := b & caseMask
if upper < 'A' || upper > 'Z' {
nonLetter = true
} else if upper == 'K' || upper == 'S' {
// See above for why these letters are special.
special = true
}
}
if special {
return equalFoldRight
}
if nonLetter {
return asciiEqualFold
}
return simpleLetterEqualFold
// foldName returns a folded string such that foldName(x) == foldName(y)
// is identical to bytes.EqualFold(x, y).
func foldName(in []byte) []byte {
// This is inlinable to take advantage of "function outlining".
var arr [32]byte // large enough for most JSON names
return appendFoldedName(arr[:0], in)
}
// equalFoldRight is a specialization of bytes.EqualFold when s is
// known to be all ASCII (including punctuation), but contains an 's',
// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t.
// See comments on foldFunc.
func equalFoldRight(s, t []byte) bool {
for _, sb := range s {
if len(t) == 0 {
return false
func appendFoldedName(out, in []byte) []byte {
for i := 0; i < len(in); {
// Handle single-byte ASCII.
if c := in[i]; c < utf8.RuneSelf {
if 'a' <= c && c <= 'z' {
c -= 'a' - 'A'
}
tb := t[0]
if tb < utf8.RuneSelf {
if sb != tb {
sbUpper := sb & caseMask
if 'A' <= sbUpper && sbUpper <= 'Z' {
if sbUpper != tb&caseMask {
return false
}
} else {
return false
}
}
t = t[1:]
out = append(out, c)
i++
continue
}
// sb is ASCII and t is not. t must be either kelvin
// sign or long s; sb must be s, S, k, or K.
tr, size := utf8.DecodeRune(t)
switch sb {
case 's', 'S':
if tr != smallLongEss {
return false
// Handle multi-byte Unicode.
r, n := utf8.DecodeRune(in[i:])
out = utf8.AppendRune(out, foldRune(r))
i += n
}
case 'k', 'K':
if tr != kelvin {
return false
}
default:
return false
}
t = t[size:]
}
if len(t) > 0 {
return false
}
return true
return out
}
// asciiEqualFold is a specialization of bytes.EqualFold for use when
// s is all ASCII (but may contain non-letters) and contains no
// special-folding letters.
// See comments on foldFunc.
func asciiEqualFold(s, t []byte) bool {
if len(s) != len(t) {
return false
// foldRune returns the smallest rune for all runes in the same fold set.
func foldRune(r rune) rune {
for {
r2 := unicode.SimpleFold(r)
if r2 <= r {
return r2
}
for i, sb := range s {
tb := t[i]
if sb == tb {
continue
}
if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') {
if sb&caseMask != tb&caseMask {
return false
}
} else {
return false
r = r2
}
}
return true
}
// simpleLetterEqualFold is a specialization of bytes.EqualFold for
// use when s is all ASCII letters (no underscores, etc) and also
// doesn't contain 'k', 'K', 's', or 'S'.
// See comments on foldFunc.
func simpleLetterEqualFold(s, t []byte) bool {
if len(s) != len(t) {
return false
}
for i, b := range s {
if b&caseMask != t[i]&caseMask {
return false
}
}
return true
}

View File

@ -6,111 +6,45 @@ package json
import (
"bytes"
"strings"
"testing"
"unicode/utf8"
)
var foldTests = []struct {
fn func(s, t []byte) bool
s, t string
want bool
func FuzzEqualFold(f *testing.F) {
for _, ss := range [][2]string{
{"", ""},
{"123abc", "123ABC"},
{"αβδ", "ΑΒΔ"},
{"abc", "xyz"},
{"abc", "XYZ"},
{"1", "2"},
{"hello, world!", "hello, world!"},
{"hello, world!", "Hello, World!"},
{"hello, world!", "HELLO, WORLD!"},
{"hello, world!", "jello, world!"},
{"γειά, κόσμε!", "γειά, κόσμε!"},
{"γειά, κόσμε!", "Γειά, Κόσμε!"},
{"γειά, κόσμε!", "ΓΕΙΆ, ΚΌΣΜΕ!"},
{"γειά, κόσμε!", "ΛΕΙΆ, ΚΌΣΜΕ!"},
{"AESKey", "aesKey"},
{"AESKEY", "aes_key"},
{"aes_key", "AES_KEY"},
{"AES_KEY", "aes-key"},
{"aes-key", "AES-KEY"},
{"AES-KEY", "aesKey"},
{"aesKey", "AesKey"},
{"AesKey", "AESKey"},
{"AESKey", "aeskey"},
{"DESKey", "aeskey"},
{"AES Key", "aeskey"},
} {
{equalFoldRight, "", "", true},
{equalFoldRight, "a", "a", true},
{equalFoldRight, "", "a", false},
{equalFoldRight, "a", "", false},
{equalFoldRight, "a", "A", true},
{equalFoldRight, "AB", "ab", true},
{equalFoldRight, "AB", "ac", false},
{equalFoldRight, "sbkKc", "ſbKc", true},
{equalFoldRight, "SbKkc", "ſbKc", true},
{equalFoldRight, "SbKkc", "ſbKK", false},
{equalFoldRight, "e", "é", false},
{equalFoldRight, "s", "S", true},
{simpleLetterEqualFold, "", "", true},
{simpleLetterEqualFold, "abc", "abc", true},
{simpleLetterEqualFold, "abc", "ABC", true},
{simpleLetterEqualFold, "abc", "ABCD", false},
{simpleLetterEqualFold, "abc", "xxx", false},
{asciiEqualFold, "a_B", "A_b", true},
{asciiEqualFold, "aa@", "aa`", false}, // verify 0x40 and 0x60 aren't case-equivalent
f.Add([]byte(ss[0]), []byte(ss[1]))
}
func TestFold(t *testing.T) {
for i, tt := range foldTests {
if got := tt.fn([]byte(tt.s), []byte(tt.t)); got != tt.want {
t.Errorf("%d. %q, %q = %v; want %v", i, tt.s, tt.t, got, tt.want)
equalFold := func(x, y []byte) bool { return string(foldName(x)) == string(foldName(y)) }
f.Fuzz(func(t *testing.T, x, y []byte) {
got := equalFold(x, y)
want := bytes.EqualFold(x, y)
if got != want {
t.Errorf("equalFold(%q, %q) = %v, want %v", x, y, got, want)
}
truth := strings.EqualFold(tt.s, tt.t)
if truth != tt.want {
t.Errorf("strings.EqualFold doesn't agree with case %d", i)
}
}
}
func TestFoldAgainstUnicode(t *testing.T) {
const bufSize = 5
buf1 := make([]byte, 0, bufSize)
buf2 := make([]byte, 0, bufSize)
var runes []rune
for i := 0x20; i <= 0x7f; i++ {
runes = append(runes, rune(i))
}
runes = append(runes, kelvin, smallLongEss)
funcs := []struct {
name string
fold func(s, t []byte) bool
letter bool // must be ASCII letter
simple bool // must be simple ASCII letter (not 'S' or 'K')
}{
{
name: "equalFoldRight",
fold: equalFoldRight,
},
{
name: "asciiEqualFold",
fold: asciiEqualFold,
simple: true,
},
{
name: "simpleLetterEqualFold",
fold: simpleLetterEqualFold,
simple: true,
letter: true,
},
}
for _, ff := range funcs {
for _, r := range runes {
if r >= utf8.RuneSelf {
continue
}
if ff.letter && !isASCIILetter(byte(r)) {
continue
}
if ff.simple && (r == 's' || r == 'S' || r == 'k' || r == 'K') {
continue
}
for _, r2 := range runes {
buf1 := append(buf1[:0], 'x')
buf2 := append(buf2[:0], 'x')
buf1 = buf1[:1+utf8.EncodeRune(buf1[1:bufSize], r)]
buf2 = buf2[:1+utf8.EncodeRune(buf2[1:bufSize], r2)]
buf1 = append(buf1, 'x')
buf2 = append(buf2, 'x')
want := bytes.EqualFold(buf1, buf2)
if got := ff.fold(buf1, buf2); got != want {
t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want)
}
}
}
}
}
func isASCIILetter(b byte) bool {
return ('A' <= b && b <= 'Z') || ('a' <= b && b <= 'z')
})
}

View File

@ -1,42 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gofuzz
package json
import (
"fmt"
)
func Fuzz(data []byte) (score int) {
for _, ctor := range []func() any{
func() any { return new(any) },
func() any { return new(map[string]any) },
func() any { return new([]any) },
} {
v := ctor()
err := Unmarshal(data, v)
if err != nil {
continue
}
score = 1
m, err := Marshal(v)
if err != nil {
fmt.Printf("v=%#v\n", v)
panic(err)
}
u := ctor()
err = Unmarshal(m, u)
if err != nil {
fmt.Printf("v=%#v\n", v)
fmt.Printf("m=%s\n", m)
panic(err)
}
}
return
}

View File

@ -1,53 +0,0 @@
package json
import (
"net/http"
)
// Render interface is copied from github.com/gin-gonic/gin@v1.8.1/render/render.go
type Render interface {
// Render writes data with custom ContentType.
Render(http.ResponseWriter) error
// WriteContentType writes custom ContentType.
WriteContentType(w http.ResponseWriter)
}
type GoJsonRender struct {
Data any
NilSafeSlices bool
NilSafeMaps bool
Indent *IndentOpt
Filter *string
}
func (r GoJsonRender) Render(w http.ResponseWriter) error {
header := w.Header()
if val := header["Content-Type"]; len(val) == 0 {
header["Content-Type"] = []string{"application/json; charset=utf-8"}
}
jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter)
if err != nil {
panic(err)
}
_, err = w.Write(jsonBytes)
if err != nil {
panic(err)
}
return nil
}
func (r GoJsonRender) RenderString() (string, error) {
jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter)
if err != nil {
panic(err)
}
return string(jsonBytes), nil
}
func (r GoJsonRender) WriteContentType(w http.ResponseWriter) {
header := w.Header()
if val := header["Content-Type"]; len(val) == 0 {
header["Content-Type"] = []string{"application/json; charset=utf-8"}
}
}

View File

@ -4,38 +4,67 @@
package json
import (
"bytes"
)
import "bytes"
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
// so that the JSON will be safe to embed inside HTML <script> tags.
// For historical reasons, web browsers don't honor standard HTML
// escaping within <script> tags, so an alternative JSON encoding must be used.
func HTMLEscape(dst *bytes.Buffer, src []byte) {
dst.Grow(len(src))
dst.Write(appendHTMLEscape(dst.AvailableBuffer(), src))
}
func appendHTMLEscape(dst, src []byte) []byte {
// The characters can only appear in string literals,
// so just scan the string one byte at a time.
start := 0
for i, c := range src {
if c == '<' || c == '>' || c == '&' {
dst = append(dst, src[start:i]...)
dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF])
start = i + 1
}
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
dst = append(dst, src[start:i]...)
dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF])
start = i + len("\u2029")
}
}
return append(dst, src[start:]...)
}
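
A short usage sketch of HTMLEscape as documented above (standard encoding/json import path, made-up input):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	src := []byte(`{"msg":"<b>hi</b> & bye"}`)
	var dst bytes.Buffer
	// HTMLEscape rewrites <, >, and & to \u003c, \u003e, \u0026 so the
	// result can be embedded inside an HTML <script> tag.
	json.HTMLEscape(&dst, src)
	fmt.Println(dst.String()) // {"msg":"\u003cb\u003ehi\u003c/b\u003e \u0026 bye"}
}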
// Compact appends to dst the JSON-encoded src with
// insignificant space characters elided.
func Compact(dst *bytes.Buffer, src []byte) error {
return compact(dst, src, false)
dst.Grow(len(src))
b := dst.AvailableBuffer()
b, err := appendCompact(b, src, false)
dst.Write(b)
return err
}
func compact(dst *bytes.Buffer, src []byte, escape bool) error {
origLen := dst.Len()
func appendCompact(dst, src []byte, escape bool) ([]byte, error) {
origLen := len(dst)
scan := newScanner()
defer freeScanner(scan)
start := 0
for i, c := range src {
if escape && (c == '<' || c == '>' || c == '&') {
if start < i {
dst.Write(src[start:i])
dst = append(dst, src[start:i]...)
}
dst.WriteString(`\u00`)
dst.WriteByte(hex[c>>4])
dst.WriteByte(hex[c&0xF])
dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF])
start = i + 1
}
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
if escape && c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
if start < i {
dst.Write(src[start:i])
dst = append(dst, src[start:i]...)
}
dst.WriteString(`\u202`)
dst.WriteByte(hex[src[i+2]&0xF])
dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF])
start = i + 3
}
v := scan.step(scan, c)
@ -44,29 +73,37 @@ func compact(dst *bytes.Buffer, src []byte, escape bool) error {
break
}
if start < i {
dst.Write(src[start:i])
dst = append(dst, src[start:i]...)
}
start = i + 1
}
}
if scan.eof() == scanError {
dst.Truncate(origLen)
return scan.err
return dst[:origLen], scan.err
}
if start < len(src) {
dst.Write(src[start:])
dst = append(dst, src[start:]...)
}
return nil
return dst, nil
}
func newline(dst *bytes.Buffer, prefix, indent string, depth int) {
dst.WriteByte('\n')
dst.WriteString(prefix)
func appendNewline(dst []byte, prefix, indent string, depth int) []byte {
dst = append(dst, '\n')
dst = append(dst, prefix...)
for i := 0; i < depth; i++ {
dst.WriteString(indent)
dst = append(dst, indent...)
}
return dst
}
// indentGrowthFactor specifies the growth factor of indenting JSON input.
// Empirically, the growth factor was measured to be between 1.4x to 1.8x
// for some set of compacted JSON with the indent being a single tab.
// Specify a growth factor slightly larger than what is observed
// to reduce probability of allocation in appendIndent.
// A factor no higher than 2 ensures that wasted space never exceeds 50%.
const indentGrowthFactor = 2
// Indent appends to dst an indented form of the JSON-encoded src.
// Each element in a JSON object or array begins on a new,
// indented line beginning with prefix followed by one or more
@ -79,7 +116,15 @@ func newline(dst *bytes.Buffer, prefix, indent string, depth int) {
// For example, if src has no trailing spaces, neither will dst;
// if src ends in a trailing newline, so will dst.
func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
origLen := dst.Len()
dst.Grow(indentGrowthFactor * len(src))
b := dst.AvailableBuffer()
b, err := appendIndent(b, src, prefix, indent)
dst.Write(b)
return err
}
func appendIndent(dst, src []byte, prefix, indent string) ([]byte, error) {
origLen := len(dst)
scan := newScanner()
defer freeScanner(scan)
needIndent := false
@ -96,13 +141,13 @@ func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
if needIndent && v != scanEndObject && v != scanEndArray {
needIndent = false
depth++
newline(dst, prefix, indent, depth)
dst = appendNewline(dst, prefix, indent, depth)
}
// Emit semantically uninteresting bytes
// (in particular, punctuation in strings) unmodified.
if v == scanContinue {
dst.WriteByte(c)
dst = append(dst, c)
continue
}
@ -111,33 +156,27 @@ func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
case '{', '[':
// delay indent so that empty object and array are formatted as {} and [].
needIndent = true
dst.WriteByte(c)
dst = append(dst, c)
case ',':
dst.WriteByte(c)
newline(dst, prefix, indent, depth)
dst = append(dst, c)
dst = appendNewline(dst, prefix, indent, depth)
case ':':
dst.WriteByte(c)
dst.WriteByte(' ')
dst = append(dst, c, ' ')
case '}', ']':
if needIndent {
// suppress indent in empty object/array
needIndent = false
} else {
depth--
newline(dst, prefix, indent, depth)
dst = appendNewline(dst, prefix, indent, depth)
}
dst.WriteByte(c)
dst = append(dst, c)
default:
dst.WriteByte(c)
dst = append(dst, c)
}
}
if scan.eof() == scanError {
dst.Truncate(origLen)
return scan.err
return dst[:origLen], scan.err
}
return nil
return dst, nil
}
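
A round-trip sketch of Compact and Indent, the property that TestCompactBig and TestIndentBig in this commit exercise (standard import path, arbitrary input):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	src := []byte(`{"a":[1,2],"b":{"c":3}}`)

	// Indent expands the compact input using prefix "" and a tab per level.
	var indented bytes.Buffer
	if err := json.Indent(&indented, src, "", "\t"); err != nil {
		panic(err)
	}

	// Compact removes the insignificant whitespace again.
	var compacted bytes.Buffer
	if err := json.Compact(&compacted, indented.Bytes()); err != nil {
		panic(err)
	}
	fmt.Println(bytes.Equal(compacted.Bytes(), src)) // true: Compact(Indent(src)) == src
}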

View File

@ -116,18 +116,3 @@ func TestNumberIsValid(t *testing.T) {
}
}
}
func BenchmarkNumberIsValid(b *testing.B) {
s := "-61657.61667E+61673"
for i := 0; i < b.N; i++ {
isValidNumber(s)
}
}
func BenchmarkNumberIsValidRegexp(b *testing.B) {
var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
s := "-61657.61667E+61673"
for i := 0; i < b.N; i++ {
jsonNumberRegexp.MatchString(s)
}
}

View File

@ -43,7 +43,7 @@ func checkValid(data []byte, scan *scanner) error {
}
// A SyntaxError is a description of a JSON syntax error.
// Unmarshal will return a SyntaxError if the JSON can't be parsed.
// [Unmarshal] will return a SyntaxError if the JSON can't be parsed.
type SyntaxError struct {
msg string // description of error
Offset int64 // error occurred after reading Offset bytes
@ -594,7 +594,7 @@ func (s *scanner) error(c byte, context string) int {
return scanError
}
// quoteChar formats c as a quoted character literal
// quoteChar formats c as a quoted character literal.
func quoteChar(c byte) string {
// special cases - different from quoted strings
if c == '\'' {

View File

@ -9,51 +9,59 @@ import (
"math"
"math/rand"
"reflect"
"strings"
"testing"
)
var validTests = []struct {
data string
ok bool
}{
{`foo`, false},
{`}{`, false},
{`{]`, false},
{`{}`, true},
{`{"foo":"bar"}`, true},
{`{"foo":"bar","bar":{"baz":["qux"]}}`, true},
func indentNewlines(s string) string {
return strings.Join(strings.Split(s, "\n"), "\n\t")
}
func stripWhitespace(s string) string {
return strings.Map(func(r rune) rune {
if r == ' ' || r == '\n' || r == '\r' || r == '\t' {
return -1
}
return r
}, s)
}
func TestValid(t *testing.T) {
for _, tt := range validTests {
tests := []struct {
CaseName
data string
ok bool
}{
{Name(""), `foo`, false},
{Name(""), `}{`, false},
{Name(""), `{]`, false},
{Name(""), `{}`, true},
{Name(""), `{"foo":"bar"}`, true},
{Name(""), `{"foo":"bar","bar":{"baz":["qux"]}}`, true},
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
if ok := Valid([]byte(tt.data)); ok != tt.ok {
t.Errorf("Valid(%#q) = %v, want %v", tt.data, ok, tt.ok)
t.Errorf("%s: Valid(`%s`) = %v, want %v", tt.Where, tt.data, ok, tt.ok)
}
})
}
}
// Tests of simple examples.
type example struct {
func TestCompactAndIndent(t *testing.T) {
tests := []struct {
CaseName
compact string
indent string
}
var examples = []example{
{`1`, `1`},
{`{}`, `{}`},
{`[]`, `[]`},
{`{"":2}`, "{\n\t\"\": 2\n}"},
{`[3]`, "[\n\t3\n]"},
{`[1,2,3]`, "[\n\t1,\n\t2,\n\t3\n]"},
{`{"x":1}`, "{\n\t\"x\": 1\n}"},
{ex1, ex1i},
{"{\"\":\"<>&\u2028\u2029\"}", "{\n\t\"\": \"<>&\u2028\u2029\"\n}"}, // See golang.org/issue/34070
}
var ex1 = `[true,false,null,"x",1,1.5,0,-5e+2]`
var ex1i = `[
}{
{Name(""), `1`, `1`},
{Name(""), `{}`, `{}`},
{Name(""), `[]`, `[]`},
{Name(""), `{"":2}`, "{\n\t\"\": 2\n}"},
{Name(""), `[3]`, "[\n\t3\n]"},
{Name(""), `[1,2,3]`, "[\n\t1,\n\t2,\n\t3\n]"},
{Name(""), `{"x":1}`, "{\n\t\"x\": 1\n}"},
{Name(""), `[true,false,null,"x",1,1.5,0,-5e+2]`, `[
true,
false,
null,
@ -62,25 +70,40 @@ var ex1i = `[
1.5,
0,
-5e+2
]`
func TestCompact(t *testing.T) {
]`},
{Name(""), "{\"\":\"<>&\u2028\u2029\"}", "{\n\t\"\": \"<>&\u2028\u2029\"\n}"}, // See golang.org/issue/34070
}
var buf bytes.Buffer
for _, tt := range examples {
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
buf.Reset()
if err := Compact(&buf, []byte(tt.compact)); err != nil {
t.Errorf("Compact(%#q): %v", tt.compact, err)
} else if s := buf.String(); s != tt.compact {
t.Errorf("Compact(%#q) = %#q, want original", tt.compact, s)
t.Errorf("%s: Compact error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.compact {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact))
}
buf.Reset()
if err := Compact(&buf, []byte(tt.indent)); err != nil {
t.Errorf("Compact(%#q): %v", tt.indent, err)
continue
} else if s := buf.String(); s != tt.compact {
t.Errorf("Compact(%#q) = %#q, want %#q", tt.indent, s, tt.compact)
t.Errorf("%s: Compact error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.compact {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact))
}
buf.Reset()
if err := Indent(&buf, []byte(tt.indent), "", "\t"); err != nil {
t.Errorf("%s: Indent error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.indent {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.indent))
}
buf.Reset()
if err := Indent(&buf, []byte(tt.compact), "", "\t"); err != nil {
t.Errorf("%s: Indent error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.indent {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.indent))
}
})
}
}
@ -88,38 +111,21 @@ func TestCompactSeparators(t *testing.T) {
// U+2028 and U+2029 should be escaped inside strings.
// They should not appear outside strings.
tests := []struct {
CaseName
in, compact string
}{
{"{\"\u2028\": 1}", "{\"\u2028\":1}"},
{"{\"\u2029\" :2}", "{\"\u2029\":2}"},
{Name(""), "{\"\u2028\": 1}", "{\"\u2028\":1}"},
{Name(""), "{\"\u2029\" :2}", "{\"\u2029\":2}"},
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
var buf bytes.Buffer
if err := Compact(&buf, []byte(tt.in)); err != nil {
t.Errorf("Compact(%q): %v", tt.in, err)
} else if s := buf.String(); s != tt.compact {
t.Errorf("Compact(%q) = %q, want %q", tt.in, s, tt.compact)
}
}
}
func TestIndent(t *testing.T) {
var buf bytes.Buffer
for _, tt := range examples {
buf.Reset()
if err := Indent(&buf, []byte(tt.indent), "", "\t"); err != nil {
t.Errorf("Indent(%#q): %v", tt.indent, err)
} else if s := buf.String(); s != tt.indent {
t.Errorf("Indent(%#q) = %#q, want original", tt.indent, s)
}
buf.Reset()
if err := Indent(&buf, []byte(tt.compact), "", "\t"); err != nil {
t.Errorf("Indent(%#q): %v", tt.compact, err)
continue
} else if s := buf.String(); s != tt.indent {
t.Errorf("Indent(%#q) = %#q, want %#q", tt.compact, s, tt.indent)
t.Errorf("%s: Compact error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.compact {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact))
}
})
}
}
@ -129,11 +135,11 @@ func TestCompactBig(t *testing.T) {
initBig()
var buf bytes.Buffer
if err := Compact(&buf, jsonBig); err != nil {
t.Fatalf("Compact: %v", err)
t.Fatalf("Compact error: %v", err)
}
b := buf.Bytes()
if !bytes.Equal(b, jsonBig) {
t.Error("Compact(jsonBig) != jsonBig")
t.Error("Compact:")
diff(t, b, jsonBig)
return
}
@ -144,23 +150,23 @@ func TestIndentBig(t *testing.T) {
initBig()
var buf bytes.Buffer
if err := Indent(&buf, jsonBig, "", "\t"); err != nil {
t.Fatalf("Indent1: %v", err)
t.Fatalf("Indent error: %v", err)
}
b := buf.Bytes()
if len(b) == len(jsonBig) {
// jsonBig is compact (no unnecessary spaces);
// indenting should make it bigger
t.Fatalf("Indent(jsonBig) did not get bigger")
t.Fatalf("Indent did not expand the input")
}
// should be idempotent
var buf1 bytes.Buffer
if err := Indent(&buf1, b, "", "\t"); err != nil {
t.Fatalf("Indent2: %v", err)
t.Fatalf("Indent error: %v", err)
}
b1 := buf1.Bytes()
if !bytes.Equal(b1, b) {
t.Error("Indent(Indent(jsonBig)) != Indent(jsonBig)")
t.Error("Indent(Indent(jsonBig)) != Indent(jsonBig):")
diff(t, b1, b)
return
}
@ -168,40 +174,40 @@ func TestIndentBig(t *testing.T) {
// should get back to original
buf1.Reset()
if err := Compact(&buf1, b); err != nil {
t.Fatalf("Compact: %v", err)
t.Fatalf("Compact error: %v", err)
}
b1 = buf1.Bytes()
if !bytes.Equal(b1, jsonBig) {
t.Error("Compact(Indent(jsonBig)) != jsonBig")
t.Error("Compact(Indent(jsonBig)) != jsonBig:")
diff(t, b1, jsonBig)
return
}
}
type indentErrorTest struct {
func TestIndentErrors(t *testing.T) {
tests := []struct {
CaseName
in string
err error
}{
{Name(""), `{"X": "foo", "Y"}`, &SyntaxError{"invalid character '}' after object key", 17}},
{Name(""), `{"X": "foo" "Y": "bar"}`, &SyntaxError{"invalid character '\"' after object key:value pair", 13}},
}
var indentErrorTests = []indentErrorTest{
{`{"X": "foo", "Y"}`, &SyntaxError{"invalid character '}' after object key", 17}},
{`{"X": "foo" "Y": "bar"}`, &SyntaxError{"invalid character '\"' after object key:value pair", 13}},
}
func TestIndentErrors(t *testing.T) {
for i, tt := range indentErrorTests {
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
slice := make([]uint8, 0)
buf := bytes.NewBuffer(slice)
if err := Indent(buf, []uint8(tt.in), "", ""); err != nil {
if !reflect.DeepEqual(err, tt.err) {
t.Errorf("#%d: Indent: %#v", i, err)
continue
t.Fatalf("%s: Indent error:\n\tgot: %v\n\twant: %v", tt.Where, err, tt.err)
}
}
})
}
}
func diff(t *testing.T, a, b []byte) {
t.Helper()
for i := 0; ; i++ {
if i >= len(a) || i >= len(b) || a[i] != b[i] {
j := i - 10
@ -215,10 +221,7 @@ func diff(t *testing.T, a, b []byte) {
}
func trim(b []byte) []byte {
if len(b) > 20 {
return b[0:20]
}
return b
return b[:min(len(b), 20)]
}
// Generate a random JSON object.

View File

@ -33,7 +33,7 @@ func NewDecoder(r io.Reader) *Decoder {
}
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
// Number instead of as a float64.
// [Number] instead of as a float64.
func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
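
The practical effect of UseNumber, sketched with a hypothetical large integer (standard encoding/json import path):

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	const in = `{"id":9007199254740993}` // 2^53+1, not exactly representable as float64

	// Default: numbers decoded into an interface become float64.
	var v1 any
	_ = json.NewDecoder(strings.NewReader(in)).Decode(&v1)
	fmt.Printf("%T\n", v1.(map[string]any)["id"]) // float64 (precision already lost)

	// With UseNumber: the literal is kept verbatim as a Number.
	dec := json.NewDecoder(strings.NewReader(in))
	dec.UseNumber()
	var v2 any
	_ = dec.Decode(&v2)
	n := v2.(map[string]any)["id"].(json.Number)
	fmt.Println(n.String()) // 9007199254740993
}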
// DisallowUnknownFields causes the Decoder to return an error when the destination
@ -41,13 +41,10 @@ func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
// non-ignored, exported fields in the destination.
func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true }
// TagKey sets a different TagKey (instead of "json")
func (dec *Decoder) TagKey(v string) { dec.d.tagkey = &v }
// Decode reads the next JSON-encoded value from its
// input and stores it in the value pointed to by v.
//
// See the documentation for Unmarshal for details about
// See the documentation for [Unmarshal] for details about
// the conversion of JSON into a Go value.
func (dec *Decoder) Decode(v any) error {
if dec.err != nil {
@ -82,7 +79,7 @@ func (dec *Decoder) Decode(v any) error {
}
// Buffered returns a reader of the data remaining in the Decoder's
// buffer. The reader is valid until the next call to Decode.
// buffer. The reader is valid until the next call to [Decoder.Decode].
func (dec *Decoder) Buffered() io.Reader {
return bytes.NewReader(dec.buf[dec.scanp:])
}
@ -186,7 +183,7 @@ type Encoder struct {
err error
escapeHTML bool
indentBuf *bytes.Buffer
indentBuf []byte
indentPrefix string
indentValue string
}
@ -197,15 +194,19 @@ func NewEncoder(w io.Writer) *Encoder {
}
// Encode writes the JSON encoding of v to the stream,
// with insignificant space characters elided,
// followed by a newline character.
//
// See the documentation for Marshal for details about the
// See the documentation for [Marshal] for details about the
// conversion of Go values to JSON.
func (enc *Encoder) Encode(v any) error {
if enc.err != nil {
return enc.err
}
e := newEncodeState()
defer encodeStatePool.Put(e)
err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
if err != nil {
return err
@ -221,20 +222,15 @@ func (enc *Encoder) Encode(v any) error {
b := e.Bytes()
if enc.indentPrefix != "" || enc.indentValue != "" {
if enc.indentBuf == nil {
enc.indentBuf = new(bytes.Buffer)
}
enc.indentBuf.Reset()
err = Indent(enc.indentBuf, b, enc.indentPrefix, enc.indentValue)
enc.indentBuf, err = appendIndent(enc.indentBuf[:0], b, enc.indentPrefix, enc.indentValue)
if err != nil {
return err
}
b = enc.indentBuf.Bytes()
b = enc.indentBuf
}
if _, err = enc.w.Write(b); err != nil {
enc.err = err
}
encodeStatePool.Put(e)
return err
}
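
Typical Encoder usage that exercises the SetIndent/indentBuf path above (a sketch; the values are made up):

package main

import (
	"encoding/json"
	"os"
)

func main() {
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ") // routes each Encode through the indent buffer
	for _, v := range []any{map[string]int{"a": 1}, []string{"x", "y"}} {
		if err := enc.Encode(v); err != nil { // each encoded value is followed by a newline
			panic(err)
		}
	}
}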
@ -258,7 +254,7 @@ func (enc *Encoder) SetEscapeHTML(on bool) {
}
// RawMessage is a raw encoded JSON value.
// It implements Marshaler and Unmarshaler and can
// It implements [Marshaler] and [Unmarshaler] and can
// be used to delay JSON decoding or precompute a JSON encoding.
type RawMessage []byte
@ -284,12 +280,12 @@ var _ Unmarshaler = (*RawMessage)(nil)
// A Token holds a value of one of these types:
//
// Delim, for the four JSON delimiters [ ] { }
// bool, for JSON booleans
// float64, for JSON numbers
// Number, for JSON numbers
// string, for JSON string literals
// nil, for JSON null
// - [Delim], for the four JSON delimiters [ ] { }
// - bool, for JSON booleans
// - float64, for JSON numbers
// - [Number], for JSON numbers
// - string, for JSON string literals
// - nil, for JSON null
type Token any
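
A typical Token loop over a stream, showing the token kinds listed above (sketch; hypothetical input, standard import path):

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"strings"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"a":[1,true,"x",null]}`))
	for {
		tok, err := dec.Token()
		if err == io.EOF { // end of the input stream
			break
		}
		if err != nil {
			panic(err)
		}
		fmt.Printf("%T: %v\n", tok, tok) // json.Delim, string, float64, bool, or <nil>
	}
}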
const (
@ -359,14 +355,14 @@ func (d Delim) String() string {
}
// Token returns the next JSON token in the input stream.
// At the end of the input stream, Token returns nil, io.EOF.
// At the end of the input stream, Token returns nil, [io.EOF].
//
// Token guarantees that the delimiters [ ] { } it returns are
// properly nested and matched: if Token encounters an unexpected
// delimiter in the input, it will return an error.
//
// The input stream consists of basic JSON values—bool, string,
// number, and null—along with delimiters [ ] { } of type Delim
// number, and null—along with delimiters [ ] { } of type [Delim]
// to mark the start and end of arrays and objects.
// Commas and colons are elided.
func (dec *Decoder) Token() (Token, error) {

View File

@ -6,16 +6,44 @@ package json
import (
"bytes"
"fmt"
"io"
"log"
"net"
"net/http"
"net/http/httptest"
"path"
"reflect"
"runtime"
"runtime/debug"
"strings"
"testing"
)
// TODO(https://go.dev/issue/52751): Replace with native testing support.
// CaseName is a case name annotated with a file and line.
type CaseName struct {
Name string
Where CasePos
}
// Name annotates a case name with the file and line of the caller.
func Name(s string) (c CaseName) {
c.Name = s
runtime.Callers(2, c.Where.pc[:])
return c
}
// CasePos represents a file and line number.
type CasePos struct{ pc [1]uintptr }
func (pos CasePos) String() string {
frames := runtime.CallersFrames(pos.pc[:])
frame, _ := frames.Next()
return fmt.Sprintf("%s:%d", path.Base(frame.File), frame.Line)
}
// Test values for the stream test.
// One of each JSON kind.
var streamTest = []any{
@ -41,24 +69,61 @@ false
func TestEncoder(t *testing.T) {
for i := 0; i <= len(streamTest); i++ {
var buf bytes.Buffer
var buf strings.Builder
enc := NewEncoder(&buf)
// Check that enc.SetIndent("", "") turns off indentation.
enc.SetIndent(">", ".")
enc.SetIndent("", "")
for j, v := range streamTest[0:i] {
if err := enc.Encode(v); err != nil {
t.Fatalf("encode #%d: %v", j, err)
t.Fatalf("#%d.%d Encode error: %v", i, j, err)
}
}
if have, want := buf.String(), nlines(streamEncoded, i); have != want {
t.Errorf("encoding %d items: mismatch", i)
t.Errorf("encoding %d items: mismatch:", i)
diff(t, []byte(have), []byte(want))
break
}
}
}
func TestEncoderErrorAndReuseEncodeState(t *testing.T) {
// Disable the GC temporarily to prevent the encodeStates in the pool from being cleaned away during the test.
percent := debug.SetGCPercent(-1)
defer debug.SetGCPercent(percent)
// Trigger an error in Marshal with cyclic data.
type Dummy struct {
Name string
Next *Dummy
}
dummy := Dummy{Name: "Dummy"}
dummy.Next = &dummy
var buf bytes.Buffer
enc := NewEncoder(&buf)
if err := enc.Encode(dummy); err == nil {
t.Errorf("Encode(dummy) error: got nil, want non-nil")
}
type Data struct {
A string
I int
}
want := Data{A: "a", I: 1}
if err := enc.Encode(want); err != nil {
t.Errorf("Marshal error: %v", err)
}
var got Data
if err := Unmarshal(buf.Bytes(), &got); err != nil {
t.Errorf("Unmarshal error: %v", err)
}
if got != want {
t.Errorf("Marshal/Unmarshal roundtrip:\n\tgot: %v\n\twant: %v", got, want)
}
}
var streamEncodedIndent = `0.1
"hello"
null
@@ -77,14 +142,14 @@ false
`
func TestEncoderIndent(t *testing.T) {
var buf bytes.Buffer
var buf strings.Builder
enc := NewEncoder(&buf)
enc.SetIndent(">", ".")
for _, v := range streamTest {
enc.Encode(v)
}
if have, want := buf.String(), streamEncodedIndent; have != want {
t.Error("indented encoding mismatch")
t.Error("Encode mismatch:")
diff(t, []byte(have), []byte(want))
}
}
@@ -122,50 +187,51 @@ func TestEncoderSetEscapeHTML(t *testing.T) {
Bar string `json:"bar,string"`
}{`<html>foobar</html>`}
for _, tt := range []struct {
name string
tests := []struct {
CaseName
v any
wantEscape string
want string
}{
{"c", c, `"\u003c\u0026\u003e"`, `"<&>"`},
{"ct", ct, `"\"\u003c\u0026\u003e\""`, `"\"<&>\""`},
{`"<&>"`, "<&>", `"\u003c\u0026\u003e"`, `"<&>"`},
{Name("c"), c, `"\u003c\u0026\u003e"`, `"<&>"`},
{Name("ct"), ct, `"\"\u003c\u0026\u003e\""`, `"\"<&>\""`},
{Name(`"<&>"`), "<&>", `"\u003c\u0026\u003e"`, `"<&>"`},
{
"tagStruct", tagStruct,
Name("tagStruct"), tagStruct,
`{"\u003c\u003e\u0026#! ":0,"Invalid":0}`,
`{"<>&#! ":0,"Invalid":0}`,
},
{
`"<str>"`, marshalerStruct,
Name(`"<str>"`), marshalerStruct,
`{"NonPtr":"\u003cstr\u003e","Ptr":"\u003cstr\u003e"}`,
`{"NonPtr":"<str>","Ptr":"<str>"}`,
},
{
"stringOption", stringOption,
Name("stringOption"), stringOption,
`{"bar":"\"\\u003chtml\\u003efoobar\\u003c/html\\u003e\""}`,
`{"bar":"\"<html>foobar</html>\""}`,
},
} {
var buf bytes.Buffer
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
var buf strings.Builder
enc := NewEncoder(&buf)
if err := enc.Encode(tt.v); err != nil {
t.Errorf("Encode(%s): %s", tt.name, err)
continue
t.Fatalf("%s: Encode(%s) error: %s", tt.Where, tt.Name, err)
}
if got := strings.TrimSpace(buf.String()); got != tt.wantEscape {
t.Errorf("Encode(%s) = %#q, want %#q", tt.name, got, tt.wantEscape)
t.Errorf("%s: Encode(%s):\n\tgot: %s\n\twant: %s", tt.Where, tt.Name, got, tt.wantEscape)
}
buf.Reset()
enc.SetEscapeHTML(false)
if err := enc.Encode(tt.v); err != nil {
t.Errorf("SetEscapeHTML(false) Encode(%s): %s", tt.name, err)
continue
t.Fatalf("%s: SetEscapeHTML(false) Encode(%s) error: %s", tt.Where, tt.Name, err)
}
if got := strings.TrimSpace(buf.String()); got != tt.want {
t.Errorf("SetEscapeHTML(false) Encode(%s) = %#q, want %#q",
tt.name, got, tt.want)
t.Errorf("%s: SetEscapeHTML(false) Encode(%s):\n\tgot: %s\n\twant: %s",
tt.Where, tt.Name, got, tt.want)
}
})
}
}
@@ -186,14 +252,14 @@ func TestDecoder(t *testing.T) {
dec := NewDecoder(&buf)
for j := range out {
if err := dec.Decode(&out[j]); err != nil {
t.Fatalf("decode #%d/%d: %v", j, i, err)
t.Fatalf("decode #%d/%d error: %v", j, i, err)
}
}
if !reflect.DeepEqual(out, streamTest[0:i]) {
t.Errorf("decoding %d items: mismatch", i)
t.Errorf("decoding %d items: mismatch:", i)
for j := range out {
if !reflect.DeepEqual(out[j], streamTest[j]) {
t.Errorf("#%d: have %v want %v", j, out[j], streamTest[j])
t.Errorf("#%d:\n\tgot: %v\n\twant: %v", j, out[j], streamTest[j])
}
}
break
@@ -212,14 +278,14 @@ func TestDecoderBuffered(t *testing.T) {
t.Fatal(err)
}
if m.Name != "Gopher" {
t.Errorf("Name = %q; want Gopher", m.Name)
t.Errorf("Name = %s, want Gopher", m.Name)
}
rest, err := io.ReadAll(d.Buffered())
if err != nil {
t.Fatal(err)
}
if g, w := string(rest), " extra "; g != w {
t.Errorf("Remaining = %q; want %q", g, w)
if got, want := string(rest), " extra "; got != want {
t.Errorf("Remaining = %s, want %s", got, want)
}
}
@@ -244,20 +310,20 @@ func TestRawMessage(t *testing.T) {
Y float32
}
const raw = `["\u0056",null]`
const msg = `{"X":0.1,"Id":["\u0056",null],"Y":0.2}`
err := Unmarshal([]byte(msg), &data)
const want = `{"X":0.1,"Id":["\u0056",null],"Y":0.2}`
err := Unmarshal([]byte(want), &data)
if err != nil {
t.Fatalf("Unmarshal: %v", err)
t.Fatalf("Unmarshal error: %v", err)
}
if string([]byte(data.Id)) != raw {
t.Fatalf("Raw mismatch: have %#q want %#q", []byte(data.Id), raw)
t.Fatalf("Unmarshal:\n\tgot: %s\n\twant: %s", []byte(data.Id), raw)
}
b, err := Marshal(&data)
got, err := Marshal(&data)
if err != nil {
t.Fatalf("Marshal: %v", err)
t.Fatalf("Marshal error: %v", err)
}
if string(b) != msg {
t.Fatalf("Marshal: have %#q want %#q", b, msg)
if string(got) != want {
t.Fatalf("Marshal:\n\tgot: %s\n\twant: %s", got, want)
}
}
@@ -268,174 +334,156 @@ func TestNullRawMessage(t *testing.T) {
IdPtr *RawMessage
Y float32
}
const msg = `{"X":0.1,"Id":null,"IdPtr":null,"Y":0.2}`
err := Unmarshal([]byte(msg), &data)
const want = `{"X":0.1,"Id":null,"IdPtr":null,"Y":0.2}`
err := Unmarshal([]byte(want), &data)
if err != nil {
t.Fatalf("Unmarshal: %v", err)
t.Fatalf("Unmarshal error: %v", err)
}
if want, got := "null", string(data.Id); want != got {
t.Fatalf("Raw mismatch: have %q, want %q", got, want)
t.Fatalf("Unmarshal:\n\tgot: %s\n\twant: %s", got, want)
}
if data.IdPtr != nil {
t.Fatalf("Raw pointer mismatch: have non-nil, want nil")
t.Fatalf("pointer mismatch: got non-nil, want nil")
}
b, err := Marshal(&data)
got, err := Marshal(&data)
if err != nil {
t.Fatalf("Marshal: %v", err)
t.Fatalf("Marshal error: %v", err)
}
if string(b) != msg {
t.Fatalf("Marshal: have %#q want %#q", b, msg)
if string(got) != want {
t.Fatalf("Marshal:\n\tgot: %s\n\twant: %s", got, want)
}
}
var blockingTests = []string{
`{"x": 1}`,
`[1, 2, 3]`,
}
func TestBlocking(t *testing.T) {
for _, enc := range blockingTests {
tests := []struct {
CaseName
in string
}{
{Name(""), `{"x": 1}`},
{Name(""), `[1, 2, 3]`},
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
r, w := net.Pipe()
go w.Write([]byte(enc))
go w.Write([]byte(tt.in))
var val any
// If Decode reads beyond what w.Write writes above,
// it will block, and the test will deadlock.
if err := NewDecoder(r).Decode(&val); err != nil {
t.Errorf("decoding %s: %v", enc, err)
t.Errorf("%s: NewDecoder(%s).Decode error: %v", tt.Where, tt.in, err)
}
r.Close()
w.Close()
}
}
func BenchmarkEncoderEncode(b *testing.B) {
b.ReportAllocs()
type T struct {
X, Y string
}
v := &T{"foo", "bar"}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := NewEncoder(io.Discard).Encode(v); err != nil {
b.Fatal(err)
}
}
})
}
type tokenStreamCase struct {
json string
expTokens []any
}
type decodeThis struct {
v any
}
var tokenStreamCases = []tokenStreamCase{
func TestDecodeInStream(t *testing.T) {
tests := []struct {
CaseName
json string
expTokens []any
}{
// streaming token cases
{json: `10`, expTokens: []any{float64(10)}},
{json: ` [10] `, expTokens: []any{
{CaseName: Name(""), json: `10`, expTokens: []any{float64(10)}},
{CaseName: Name(""), json: ` [10] `, expTokens: []any{
Delim('['), float64(10), Delim(']')}},
{json: ` [false,10,"b"] `, expTokens: []any{
{CaseName: Name(""), json: ` [false,10,"b"] `, expTokens: []any{
Delim('['), false, float64(10), "b", Delim(']')}},
{json: `{ "a": 1 }`, expTokens: []any{
{CaseName: Name(""), json: `{ "a": 1 }`, expTokens: []any{
Delim('{'), "a", float64(1), Delim('}')}},
{json: `{"a": 1, "b":"3"}`, expTokens: []any{
{CaseName: Name(""), json: `{"a": 1, "b":"3"}`, expTokens: []any{
Delim('{'), "a", float64(1), "b", "3", Delim('}')}},
{json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{
{CaseName: Name(""), json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{
Delim('['),
Delim('{'), "a", float64(1), Delim('}'),
Delim('{'), "a", float64(2), Delim('}'),
Delim(']')}},
{json: `{"obj": {"a": 1}}`, expTokens: []any{
{CaseName: Name(""), json: `{"obj": {"a": 1}}`, expTokens: []any{
Delim('{'), "obj", Delim('{'), "a", float64(1), Delim('}'),
Delim('}')}},
{json: `{"obj": [{"a": 1}]}`, expTokens: []any{
{CaseName: Name(""), json: `{"obj": [{"a": 1}]}`, expTokens: []any{
Delim('{'), "obj", Delim('['),
Delim('{'), "a", float64(1), Delim('}'),
Delim(']'), Delim('}')}},
// streaming tokens with intermittent Decode()
{json: `{ "a": 1 }`, expTokens: []any{
{CaseName: Name(""), json: `{ "a": 1 }`, expTokens: []any{
Delim('{'), "a",
decodeThis{float64(1)},
Delim('}')}},
{json: ` [ { "a" : 1 } ] `, expTokens: []any{
{CaseName: Name(""), json: ` [ { "a" : 1 } ] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
Delim(']')}},
{json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{
{CaseName: Name(""), json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
decodeThis{map[string]any{"a": float64(2)}},
Delim(']')}},
{json: `{ "obj" : [ { "a" : 1 } ] }`, expTokens: []any{
{CaseName: Name(""), json: `{ "obj" : [ { "a" : 1 } ] }`, expTokens: []any{
Delim('{'), "obj", Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
Delim(']'), Delim('}')}},
{json: `{"obj": {"a": 1}}`, expTokens: []any{
{CaseName: Name(""), json: `{"obj": {"a": 1}}`, expTokens: []any{
Delim('{'), "obj",
decodeThis{map[string]any{"a": float64(1)}},
Delim('}')}},
{json: `{"obj": [{"a": 1}]}`, expTokens: []any{
{CaseName: Name(""), json: `{"obj": [{"a": 1}]}`, expTokens: []any{
Delim('{'), "obj",
decodeThis{[]any{
map[string]any{"a": float64(1)},
}},
Delim('}')}},
{json: ` [{"a": 1} {"a": 2}] `, expTokens: []any{
{CaseName: Name(""), json: ` [{"a": 1} {"a": 2}] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
decodeThis{&SyntaxError{"expected comma after array element", 11}},
}},
{json: `{ "` + strings.Repeat("a", 513) + `" 1 }`, expTokens: []any{
{CaseName: Name(""), json: `{ "` + strings.Repeat("a", 513) + `" 1 }`, expTokens: []any{
Delim('{'), strings.Repeat("a", 513),
decodeThis{&SyntaxError{"expected colon after object key", 518}},
}},
{json: `{ "\a" }`, expTokens: []any{
{CaseName: Name(""), json: `{ "\a" }`, expTokens: []any{
Delim('{'),
&SyntaxError{"invalid character 'a' in string escape code", 3},
}},
{json: ` \a`, expTokens: []any{
{CaseName: Name(""), json: ` \a`, expTokens: []any{
&SyntaxError{"invalid character '\\\\' looking for beginning of value", 1},
}},
}
func TestDecodeInStream(t *testing.T) {
for ci, tcase := range tokenStreamCases {
dec := NewDecoder(strings.NewReader(tcase.json))
for i, etk := range tcase.expTokens {
var tk any
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
dec := NewDecoder(strings.NewReader(tt.json))
for i, want := range tt.expTokens {
var got any
var err error
if dt, ok := etk.(decodeThis); ok {
etk = dt.v
err = dec.Decode(&tk)
if dt, ok := want.(decodeThis); ok {
want = dt.v
err = dec.Decode(&got)
} else {
tk, err = dec.Token()
got, err = dec.Token()
}
if experr, ok := etk.(error); ok {
if err == nil || !reflect.DeepEqual(err, experr) {
t.Errorf("case %v: Expected error %#v in %q, but was %#v", ci, experr, tcase.json, err)
if errWant, ok := want.(error); ok {
if err == nil || !reflect.DeepEqual(err, errWant) {
t.Fatalf("%s:\n\tinput: %s\n\tgot error: %v\n\twant error: %v", tt.Where, tt.json, err, errWant)
}
break
} else if err == io.EOF {
t.Errorf("case %v: Unexpected EOF in %q", ci, tcase.json)
break
} else if err != nil {
t.Errorf("case %v: Unexpected error '%#v' in %q", ci, err, tcase.json)
break
t.Fatalf("%s:\n\tinput: %s\n\tgot error: %v\n\twant error: nil", tt.Where, tt.json, err)
}
if !reflect.DeepEqual(tk, etk) {
t.Errorf(`case %v: %q @ %v expected %T(%v) was %T(%v)`, ci, tcase.json, i, etk, etk, tk, tk)
break
if !reflect.DeepEqual(got, want) {
t.Fatalf("%s: token %d:\n\tinput: %s\n\tgot: %T(%v)\n\twant: %T(%v)", tt.Where, i, tt.json, got, got, want, want)
}
}
})
}
}
@@ -449,7 +497,7 @@ func TestHTTPDecoding(t *testing.T) {
defer ts.Close()
res, err := http.Get(ts.URL)
if err != nil {
log.Fatalf("GET failed: %v", err)
log.Fatalf("http.Get error: %v", err)
}
defer res.Body.Close()
@@ -460,15 +508,15 @@ func TestHTTPDecoding(t *testing.T) {
d := NewDecoder(res.Body)
err = d.Decode(&foo)
if err != nil {
t.Fatalf("Decode: %v", err)
t.Fatalf("Decode error: %v", err)
}
if foo.Foo != "bar" {
t.Errorf("decoded %q; want \"bar\"", foo.Foo)
t.Errorf(`Decode: got %q, want "bar"`, foo.Foo)
}
// make sure we get the EOF the second time
err = d.Decode(&foo)
if err != io.EOF {
t.Errorf("err = %v; want io.EOF", err)
t.Errorf("Decode error:\n\tgot: %v\n\twant: io.EOF", err)
}
}

View File

@@ -72,49 +72,50 @@ type unicodeTag struct {
W string `json:"Ελλάδα"`
}
var structTagObjectKeyTests = []struct {
func TestStructTagObjectKey(t *testing.T) {
tests := []struct {
CaseName
raw any
value string
key string
}{
{basicLatin2xTag{"2x"}, "2x", "$%-/"},
{basicLatin3xTag{"3x"}, "3x", "0123456789"},
{basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"},
{basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"},
{basicLatin6xTag{"6x"}, "6x", "abcdefghijklmno"},
{basicLatin7xTag{"7x"}, "7x", "pqrstuvwxyz"},
{miscPlaneTag{"いろはにほへと"}, "いろはにほへと", "色は匂へど"},
{dashTag{"foo"}, "foo", "-"},
{emptyTag{"Pour Moi"}, "Pour Moi", "W"},
{misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"},
{badFormatTag{"Orfevre"}, "Orfevre", "Y"},
{badCodeTag{"Reliable Man"}, "Reliable Man", "Z"},
{percentSlashTag{"brut"}, "brut", "text/html%"},
{punctuationTag{"Union Rags"}, "Union Rags", "!#$%&()*+-./:;<=>?@[]^_{|}~ "},
{spaceTag{"Perreddu"}, "Perreddu", "With space"},
{unicodeTag{"Loukanikos"}, "Loukanikos", "Ελλάδα"},
{Name(""), basicLatin2xTag{"2x"}, "2x", "$%-/"},
{Name(""), basicLatin3xTag{"3x"}, "3x", "0123456789"},
{Name(""), basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"},
{Name(""), basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"},
{Name(""), basicLatin6xTag{"6x"}, "6x", "abcdefghijklmno"},
{Name(""), basicLatin7xTag{"7x"}, "7x", "pqrstuvwxyz"},
{Name(""), miscPlaneTag{"いろはにほへと"}, "いろはにほへと", "色は匂へど"},
{Name(""), dashTag{"foo"}, "foo", "-"},
{Name(""), emptyTag{"Pour Moi"}, "Pour Moi", "W"},
{Name(""), misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"},
{Name(""), badFormatTag{"Orfevre"}, "Orfevre", "Y"},
{Name(""), badCodeTag{"Reliable Man"}, "Reliable Man", "Z"},
{Name(""), percentSlashTag{"brut"}, "brut", "text/html%"},
{Name(""), punctuationTag{"Union Rags"}, "Union Rags", "!#$%&()*+-./:;<=>?@[]^_{|}~ "},
{Name(""), spaceTag{"Perreddu"}, "Perreddu", "With space"},
{Name(""), unicodeTag{"Loukanikos"}, "Loukanikos", "Ελλάδα"},
}
func TestStructTagObjectKey(t *testing.T) {
for _, tt := range structTagObjectKeyTests {
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
b, err := Marshal(tt.raw)
if err != nil {
t.Fatalf("Marshal(%#q) failed: %v", tt.raw, err)
t.Fatalf("%s: Marshal error: %v", tt.Where, err)
}
var f any
err = Unmarshal(b, &f)
if err != nil {
t.Fatalf("Unmarshal(%#q) failed: %v", b, err)
t.Fatalf("%s: Unmarshal error: %v", tt.Where, err)
}
for i, v := range f.(map[string]any) {
switch i {
case tt.key:
for k, v := range f.(map[string]any) {
if k == tt.key {
if s, ok := v.(string); !ok || s != tt.value {
t.Fatalf("Unexpected value: %#q, want %v", s, tt.value)
t.Fatalf("%s: Unmarshal(%#q) value:\n\tgot: %q\n\twant: %q", tt.Where, b, s, tt.value)
}
default:
t.Fatalf("Unexpected key: %#q, from %#q", i, b)
} else {
t.Fatalf("%s: Unmarshal(%#q): unexpected key: %q", tt.Where, b, k)
}
}
})
}
}

View File

@@ -22,7 +22,7 @@ func TestTagParsing(t *testing.T) {
{"bar", false},
} {
if opts.Contains(tt.opt) != tt.want {
t.Errorf("Contains(%q) = %v", tt.opt, !tt.want)
t.Errorf("Contains(%q) = %v, want %v", tt.opt, !tt.want, tt.want)
}
}
}