chore: dependencies upgrade

2023-10-15 12:10:45 +02:00
parent 58538cff20
commit f4807ee9e6
91 changed files with 2329 additions and 2542 deletions

View File

@@ -22,20 +22,20 @@ package zapcore
import (
"fmt"
"sync"
"go.uber.org/zap/buffer"
"go.uber.org/zap/internal/bufferpool"
"go.uber.org/zap/internal/pool"
)
var _sliceEncoderPool = sync.Pool{
New: func() interface{} {
return &sliceArrayEncoder{elems: make([]interface{}, 0, 2)}
},
}
var _sliceEncoderPool = pool.New(func() *sliceArrayEncoder {
return &sliceArrayEncoder{
elems: make([]interface{}, 0, 2),
}
})
func getSliceEncoder() *sliceArrayEncoder {
return _sliceEncoderPool.Get().(*sliceArrayEncoder)
return _sliceEncoderPool.Get()
}
func putSliceEncoder(e *sliceArrayEncoder) {

View File

@@ -102,9 +102,9 @@ func (c *ioCore) Write(ent Entry, fields []Field) error {
return err
}
if ent.Level > ErrorLevel {
// Since we may be crashing the program, sync the output. Ignore Sync
// errors, pending a clean solution to issue #370.
c.Sync()
// Since we may be crashing the program, sync the output.
// Ignore Sync errors, pending a clean solution to issue #370.
_ = c.Sync()
}
return nil
}

View File

@@ -24,25 +24,23 @@ import (
"fmt"
"runtime"
"strings"
"sync"
"time"
"go.uber.org/multierr"
"go.uber.org/zap/internal/bufferpool"
"go.uber.org/zap/internal/exit"
"go.uber.org/zap/internal/pool"
)
var (
_cePool = sync.Pool{New: func() interface{} {
// Pre-allocate some space for cores.
return &CheckedEntry{
cores: make([]Core, 4),
}
}}
)
var _cePool = pool.New(func() *CheckedEntry {
// Pre-allocate some space for cores.
return &CheckedEntry{
cores: make([]Core, 4),
}
})
func getCheckedEntry() *CheckedEntry {
ce := _cePool.Get().(*CheckedEntry)
ce := _cePool.Get()
ce.reset()
return ce
}
@@ -244,7 +242,7 @@ func (ce *CheckedEntry) Write(fields ...Field) {
// CheckedEntry is being used after it was returned to the pool,
// the message may be an amalgamation from multiple call sites.
fmt.Fprintf(ce.ErrorOutput, "%v Unsafe CheckedEntry re-use near Entry %+v.\n", ce.Time, ce.Entry)
ce.ErrorOutput.Sync()
_ = ce.ErrorOutput.Sync() // ignore error
}
return
}
@@ -256,7 +254,7 @@ func (ce *CheckedEntry) Write(fields ...Field) {
}
if err != nil && ce.ErrorOutput != nil {
fmt.Fprintf(ce.ErrorOutput, "%v write error: %v\n", ce.Time, err)
ce.ErrorOutput.Sync()
_ = ce.ErrorOutput.Sync() // ignore error
}
hook := ce.after
@@ -281,7 +279,8 @@ func (ce *CheckedEntry) AddCore(ent Entry, core Core) *CheckedEntry {
// Should sets this CheckedEntry's CheckWriteAction, which controls whether a
// Core will panic or fatal after writing this log entry. Like AddCore, it's
// safe to call on nil CheckedEntry references.
// Deprecated: Use After(ent Entry, after CheckWriteHook) instead.
//
// Deprecated: Use [CheckedEntry.After] instead.
func (ce *CheckedEntry) Should(ent Entry, should CheckWriteAction) *CheckedEntry {
return ce.After(ent, should)
}
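getCheckedEntry above also illustrates the usual lifecycle for pooled objects: Get an instance, reset any state left over from its previous use, hand it out, and Put it back once the entry has been written. A small self-contained sketch of that pattern, using a plain sync.Pool and hypothetical names (pooledEntry, getEntry, putEntry) rather than zap's types:

```go
package main

import (
	"fmt"
	"sync"
)

// pooledEntry stands in for a reusable object such as zapcore.CheckedEntry.
type pooledEntry struct {
	msg   string
	cores []int // placeholder for per-entry state
}

var entryPool = sync.Pool{
	New: func() any {
		// Pre-allocate some space for per-entry state, mirroring the
		// cores pre-allocation in the hunk above.
		return &pooledEntry{cores: make([]int, 0, 4)}
	},
}

// getEntry fetches an entry and resets it so nothing leaks from a prior use.
func getEntry() *pooledEntry {
	e := entryPool.Get().(*pooledEntry)
	e.msg = ""
	e.cores = e.cores[:0] // keep the backing array, drop old contents
	return e
}

// putEntry returns the entry to the pool once the caller is done with it.
func putEntry(e *pooledEntry) {
	entryPool.Put(e)
}

func main() {
	e := getEntry()
	e.msg = "written and recycled"
	fmt.Println(e.msg)
	putEntry(e)
}
```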

View File

@@ -23,7 +23,8 @@ package zapcore
import (
"fmt"
"reflect"
"sync"
"go.uber.org/zap/internal/pool"
)
// Encodes the given error into fields of an object. A field with the given
@@ -97,15 +98,18 @@ func (errs errArray) MarshalLogArray(arr ArrayEncoder) error {
}
el := newErrArrayElem(errs[i])
arr.AppendObject(el)
err := arr.AppendObject(el)
el.Free()
if err != nil {
return err
}
}
return nil
}
var _errArrayElemPool = sync.Pool{New: func() interface{} {
var _errArrayElemPool = pool.New(func() *errArrayElem {
return &errArrayElem{}
}}
})
// Encodes any error into a {"error": ...} re-using the same errors logic.
//
@@ -113,7 +117,7 @@ var _errArrayElemPool = sync.Pool{New: func() interface{} {
type errArrayElem struct{ err error }
func newErrArrayElem(err error) *errArrayElem {
e := _errArrayElemPool.Get().(*errArrayElem)
e := _errArrayElemPool.Get()
e.err = err
return e
}

View File

@@ -23,24 +23,20 @@ package zapcore
import (
"encoding/base64"
"math"
"sync"
"time"
"unicode/utf8"
"go.uber.org/zap/buffer"
"go.uber.org/zap/internal/bufferpool"
"go.uber.org/zap/internal/pool"
)
// For JSON-escaping; see jsonEncoder.safeAddString below.
const _hex = "0123456789abcdef"
var _jsonPool = sync.Pool{New: func() interface{} {
var _jsonPool = pool.New(func() *jsonEncoder {
return &jsonEncoder{}
}}
func getJSONEncoder() *jsonEncoder {
return _jsonPool.Get().(*jsonEncoder)
}
})
func putJSONEncoder(enc *jsonEncoder) {
if enc.reflectBuf != nil {
@@ -354,7 +350,7 @@ func (enc *jsonEncoder) Clone() Encoder {
}
func (enc *jsonEncoder) clone() *jsonEncoder {
clone := getJSONEncoder()
clone := _jsonPool.Get()
clone.EncoderConfig = enc.EncoderConfig
clone.spaced = enc.spaced
clone.openNamespaces = enc.openNamespaces
@@ -490,73 +486,98 @@ func (enc *jsonEncoder) appendFloat(val float64, bitSize int) {
// Unlike the standard library's encoder, it doesn't attempt to protect the
// user from browser vulnerabilities or JSONP-related problems.
func (enc *jsonEncoder) safeAddString(s string) {
for i := 0; i < len(s); {
if enc.tryAddRuneSelf(s[i]) {
i++
continue
}
r, size := utf8.DecodeRuneInString(s[i:])
if enc.tryAddRuneError(r, size) {
i++
continue
}
enc.buf.AppendString(s[i : i+size])
i += size
}
safeAppendStringLike(
(*buffer.Buffer).AppendString,
utf8.DecodeRuneInString,
enc.buf,
s,
)
}
// safeAddByteString is no-alloc equivalent of safeAddString(string(s)) for s []byte.
func (enc *jsonEncoder) safeAddByteString(s []byte) {
safeAppendStringLike(
(*buffer.Buffer).AppendBytes,
utf8.DecodeRune,
enc.buf,
s,
)
}
// safeAppendStringLike is a generic implementation of safeAddString and safeAddByteString.
// It appends a string or byte slice to the buffer, escaping all special characters.
func safeAppendStringLike[S []byte | string](
// appendTo appends this string-like object to the buffer.
appendTo func(*buffer.Buffer, S),
// decodeRune decodes the next rune from the string-like object
// and returns its value and width in bytes.
decodeRune func(S) (rune, int),
buf *buffer.Buffer,
s S,
) {
// The encoding logic below works by skipping over characters
// that can be safely copied as-is,
// until a character is found that needs special handling.
// At that point, we copy everything we've seen so far,
// and then handle that special character.
//
// last is the index of the last byte that was copied to the buffer.
last := 0
for i := 0; i < len(s); {
if enc.tryAddRuneSelf(s[i]) {
i++
continue
}
r, size := utf8.DecodeRune(s[i:])
if enc.tryAddRuneError(r, size) {
i++
continue
}
enc.buf.Write(s[i : i+size])
i += size
}
}
if s[i] >= utf8.RuneSelf {
// Character >= RuneSelf may be part of a multi-byte rune.
// They need to be decoded before we can decide how to handle them.
r, size := decodeRune(s[i:])
if r != utf8.RuneError || size != 1 {
// No special handling required.
// Skip over this rune and continue.
i += size
continue
}
// tryAddRuneSelf appends b if it is valid UTF-8 character represented in a single byte.
func (enc *jsonEncoder) tryAddRuneSelf(b byte) bool {
if b >= utf8.RuneSelf {
return false
}
if 0x20 <= b && b != '\\' && b != '"' {
enc.buf.AppendByte(b)
return true
}
switch b {
case '\\', '"':
enc.buf.AppendByte('\\')
enc.buf.AppendByte(b)
case '\n':
enc.buf.AppendByte('\\')
enc.buf.AppendByte('n')
case '\r':
enc.buf.AppendByte('\\')
enc.buf.AppendByte('r')
case '\t':
enc.buf.AppendByte('\\')
enc.buf.AppendByte('t')
default:
// Encode bytes < 0x20, except for the escape sequences above.
enc.buf.AppendString(`\u00`)
enc.buf.AppendByte(_hex[b>>4])
enc.buf.AppendByte(_hex[b&0xF])
}
return true
}
// Invalid UTF-8 sequence.
// Replace it with the Unicode replacement character.
appendTo(buf, s[last:i])
buf.AppendString(`\ufffd`)
func (enc *jsonEncoder) tryAddRuneError(r rune, size int) bool {
if r == utf8.RuneError && size == 1 {
enc.buf.AppendString(`\ufffd`)
return true
i++
last = i
} else {
// Character < RuneSelf is a single-byte UTF-8 rune.
if s[i] >= 0x20 && s[i] != '\\' && s[i] != '"' {
// No escaping necessary.
// Skip over this character and continue.
i++
continue
}
// This character needs to be escaped.
appendTo(buf, s[last:i])
switch s[i] {
case '\\', '"':
buf.AppendByte('\\')
buf.AppendByte(s[i])
case '\n':
buf.AppendByte('\\')
buf.AppendByte('n')
case '\r':
buf.AppendByte('\\')
buf.AppendByte('r')
case '\t':
buf.AppendByte('\\')
buf.AppendByte('t')
default:
// Encode bytes < 0x20, except for the escape sequences above.
buf.AppendString(`\u00`)
buf.AppendByte(_hex[s[i]>>4])
buf.AppendByte(_hex[s[i]&0xF])
}
i++
last = i
}
}
return false
// add remaining
appendTo(buf, s[last:])
}
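The rewritten escaping code above collapses two nearly identical loops (safeAddString for string, safeAddByteString for []byte) into one generic function that receives the append and rune-decoding operations as parameters. The sketch below demonstrates the same technique against a strings.Builder instead of zap's *buffer.Buffer, so the helper names (escapeStringLike, appendString, appendBytes) are illustrative, not zap's API.

```go
package main

import (
	"fmt"
	"strings"
	"unicode/utf8"
)

const hexDigits = "0123456789abcdef"

// escapeStringLike JSON-escapes s into b. One generic loop serves both string
// and []byte inputs; the caller injects how to append a chunk and how to
// decode the next rune.
func escapeStringLike[S []byte | string](
	appendTo func(*strings.Builder, S),
	decodeRune func(S) (rune, int),
	b *strings.Builder,
	s S,
) {
	last := 0 // index just past the last byte already copied to b
	for i := 0; i < len(s); {
		if s[i] >= utf8.RuneSelf {
			// Possibly part of a multi-byte rune; decode to check validity.
			r, size := decodeRune(s[i:])
			if r != utf8.RuneError || size != 1 {
				i += size // valid rune, copy later in one batch
				continue
			}
			appendTo(b, s[last:i]) // flush the pending safe bytes
			b.WriteString(`\ufffd`)
			i++
			last = i
			continue
		}
		if s[i] >= 0x20 && s[i] != '\\' && s[i] != '"' {
			i++ // printable ASCII that needs no escaping
			continue
		}
		appendTo(b, s[last:i])
		switch c := s[i]; c {
		case '\\', '"':
			b.WriteByte('\\')
			b.WriteByte(c)
		case '\n':
			b.WriteString(`\n`)
		case '\r':
			b.WriteString(`\r`)
		case '\t':
			b.WriteString(`\t`)
		default:
			// Control bytes below 0x20 become \u00XX.
			b.WriteString(`\u00`)
			b.WriteByte(hexDigits[c>>4])
			b.WriteByte(hexDigits[c&0xF])
		}
		i++
		last = i
	}
	appendTo(b, s[last:]) // copy whatever is left
}

func main() {
	appendString := func(b *strings.Builder, s string) { b.WriteString(s) }
	appendBytes := func(b *strings.Builder, s []byte) { b.Write(s) }

	var b strings.Builder
	escapeStringLike(appendString, utf8.DecodeRuneInString, &b, "say \"hi\"\n")
	fmt.Println(b.String()) // say \"hi\"\n

	b.Reset()
	escapeStringLike(appendBytes, utf8.DecodeRune, &b, []byte{0xff, 'x'})
	fmt.Println(b.String()) // \ufffdx
}
```

Passing utf8.DecodeRuneInString for strings and utf8.DecodeRune for byte slices is what lets a single loop stay allocation-free for both input kinds.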

vendor/go.uber.org/zap/zapcore/lazy_with.go (new file, 54 lines; generated, vendored)
View File

@@ -0,0 +1,54 @@
// Copyright (c) 2023 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package zapcore
import "sync"
type lazyWithCore struct {
Core
sync.Once
fields []Field
}
// NewLazyWith wraps a Core with a "lazy" Core that will only encode fields if
// the logger is written to (or is further chained in a non-lazy manner).
func NewLazyWith(core Core, fields []Field) Core {
return &lazyWithCore{
Core: core,
fields: fields,
}
}
func (d *lazyWithCore) initOnce() {
d.Once.Do(func() {
d.Core = d.Core.With(d.fields)
})
}
func (d *lazyWithCore) With(fields []Field) Core {
d.initOnce()
return d.Core.With(fields)
}
func (d *lazyWithCore) Check(e Entry, ce *CheckedEntry) *CheckedEntry {
d.initOnce()
return d.Core.Check(e, ce)
}
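For context, the wrapper above defers the potentially expensive Core.With call until the core is first chained or checked, and the sync.Once guarantees it runs at most once. A hedged usage sketch, with the encoder, sink, level, and field values chosen purely for illustration:

```go
package main

import (
	"os"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	// An ordinary JSON core writing to stdout.
	enc := zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig())
	core := zapcore.NewCore(enc, zapcore.AddSync(os.Stdout), zapcore.InfoLevel)

	// Wrap it so the field is only encoded once the core is actually used.
	lazy := zapcore.NewLazyWith(core, []zapcore.Field{
		zap.String("request_id", "abc-123"),
	})

	logger := zap.New(lazy)
	defer func() { _ = logger.Sync() }()

	logger.Info("handled request") // first use triggers the one-time With
}
```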

View File

@@ -21,9 +21,8 @@
package zapcore
import (
"sync/atomic"
"time"
"go.uber.org/atomic"
)
const (
@@ -66,16 +65,16 @@ func (c *counter) IncCheckReset(t time.Time, tick time.Duration) uint64 {
tn := t.UnixNano()
resetAfter := c.resetAt.Load()
if resetAfter > tn {
return c.counter.Inc()
return c.counter.Add(1)
}
c.counter.Store(1)
newResetAfter := tn + tick.Nanoseconds()
if !c.resetAt.CAS(resetAfter, newResetAfter) {
if !c.resetAt.CompareAndSwap(resetAfter, newResetAfter) {
// We raced with another goroutine trying to reset, and it also reset
// the counter to 1, so we need to reincrement the counter.
return c.counter.Inc()
return c.counter.Add(1)
}
return 1
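This last hunk drops the external go.uber.org/atomic package in favour of the typed atomics the standard library gained in Go 1.19: Inc() becomes Add(1) and CAS becomes CompareAndSwap. A self-contained sketch of the same tick-windowed counter idea, with the names (windowCounter, incCheckReset) chosen here for illustration:

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// windowCounter counts events and starts a fresh count once per tick,
// mirroring the IncCheckReset logic above.
type windowCounter struct {
	resetAt atomic.Int64  // unix-nano timestamp at which the window expires
	counter atomic.Uint64 // events seen in the current window
}

// incCheckReset increments the counter, opening a new window if the previous
// one has expired, and returns the count within the current window.
func (c *windowCounter) incCheckReset(t time.Time, tick time.Duration) uint64 {
	tn := t.UnixNano()
	resetAfter := c.resetAt.Load()
	if resetAfter > tn {
		// Still inside the current window.
		return c.counter.Add(1)
	}

	c.counter.Store(1)
	newResetAfter := tn + tick.Nanoseconds()
	if !c.resetAt.CompareAndSwap(resetAfter, newResetAfter) {
		// Another goroutine won the race to open the new window; count
		// this event in the window it established.
		return c.counter.Add(1)
	}
	return 1
}

func main() {
	var c windowCounter
	now := time.Now()
	fmt.Println(c.incCheckReset(now, time.Second)) // 1
	fmt.Println(c.incCheckReset(now, time.Second)) // 2
}
```

Storing 1 before the CompareAndSwap on the reset timestamp is what keeps the count correct when two goroutines race to open the same window.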