niki/vendor/github.com/mitchellh/copystructure/copystructure.go

package copystructure
import (
	"errors"
	"reflect"
	"sync"

	"github.com/mitchellh/reflectwalk"
)
const tagKey = "copy"
// Copy returns a deep copy of v.
//
// Copy is unable to copy unexported fields in a struct (lowercase field names).
// Unexported fields can't be reflected by the Go runtime and therefore
// copystructure can't perform any data copies.
//
// For structs, copy behavior can be controlled with struct tags. For example:
//
//	struct {
//		Name string
//		Data *bytes.Buffer `copy:"shallow"`
//	}
//
// The available tag values are:
//
//   - "ignore" - The field will be ignored, effectively resulting in it being
//     assigned the zero value in the copy.
//
//   - "shallow" - The field will be shallow copied. This means that reference
//     values such as pointers, maps, slices, etc. will be directly assigned
//     versus deep copied.
func Copy(v interface{}) (interface{}, error) {
	return Config{}.Copy(v)
}
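
// A minimal usage sketch for Copy as called from a consuming package. The
// Employee type here is hypothetical; the `copy:"shallow"` tag means the
// copy's Logs field shares the original's underlying slice:
//
//	type Employee struct {
//		Name string
//		Logs []string `copy:"shallow"`
//	}
//
//	orig := &Employee{Name: "alice", Logs: []string{"login"}}
//	dup, err := copystructure.Copy(orig)
//	if err != nil {
//		// handle the copy error
//	}
//	copied := dup.(*Employee) // deep copy, except Logs is shared
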
// CopierFunc is a function that knows how to deep copy a specific type.
// Register these globally with the Copiers variable.
type CopierFunc func(interface{}) (interface{}, error)
// Copiers is a map of types that behave specially when they are copied.
// If a type is found in this map while deep copying, this function
// will be called to copy it instead of attempting to copy all fields.
//
// The key should be the type, obtained using: reflect.TypeOf(value with type).
//
// It is unsafe to write to this map after Copies have started. If you
// are writing to this map while also copying, wrap all modifications to
// this map as well as to Copy in a mutex.
var Copiers map[reflect.Type]CopierFunc = make(map[reflect.Type]CopierFunc)
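
// A sketch of registering a global CopierFunc for a hypothetical Counter
// struct; while deep copying, any Counter encountered is handed to this
// function instead of being walked field by field:
//
//	copystructure.Copiers[reflect.TypeOf(Counter{})] =
//		func(v interface{}) (interface{}, error) {
//			c := v.(Counter)
//			return Counter{Value: c.Value}, nil
//		}
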
// ShallowCopiers is a map of pointer types that behave specially
// when they are copied. If a type is found in this map while deep
// copying, the pointer value will be shallow copied and not walked
// into.
//
// The key should be the type, obtained using: reflect.TypeOf(value
// with type).
//
// It is unsafe to write to this map after Copies have started. If you
// are writing to this map while also copying, wrap all modifications to
// this map as well as to Copy in a mutex.
var ShallowCopiers map[reflect.Type]struct{} = make(map[reflect.Type]struct{})
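
// A sketch of marking a hypothetical *Registry pointer type as shallow-copied
// globally, so copies share the same Registry instead of walking into it:
//
//	copystructure.ShallowCopiers[reflect.TypeOf(&Registry{})] = struct{}{}
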
// Must is a helper that wraps a call to a function returning
// (interface{}, error) and panics if the error is non-nil. It is intended
// for use in variable initializations and should only be used when a copy
// error should be a crashing case.
func Must(v interface{}, err error) interface{} {
	if err != nil {
		panic("copy error: " + err.Error())
	}

	return v
}
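
// A sketch of Must in a package-level variable initialization, assuming a
// hypothetical defaultSettings value; a failed copy panics instead of
// returning an error:
//
//	var settingsCopy = copystructure.Must(copystructure.Copy(defaultSettings))
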
var errPointerRequired = errors.New("Copy argument must be a pointer when Lock is true")
type Config struct {
	// Lock any types that are a sync.Locker and are not a mutex while copying.
	// If there is an RLocker method, use that to get the sync.Locker.
	Lock bool

	// Copiers is a map of types associated with a CopierFunc. Use the global
	// Copiers map if this is nil.
	Copiers map[reflect.Type]CopierFunc

	// ShallowCopiers is a map of pointer types that are shallow copied,
	// no matter where they are encountered. Use the global ShallowCopiers
	// if this is nil.
	ShallowCopiers map[reflect.Type]struct{}
}
func (c Config) Copy(v interface{}) (interface{}, error) {
	if c.Lock && reflect.ValueOf(v).Kind() != reflect.Ptr {
		return nil, errPointerRequired
	}

	w := new(walker)
	if c.Lock {
		w.useLocks = true
	}

	if c.Copiers == nil {
		c.Copiers = Copiers
	}
	w.copiers = c.Copiers

	if c.ShallowCopiers == nil {
		c.ShallowCopiers = ShallowCopiers
	}
	w.shallowCopiers = c.ShallowCopiers

	err := reflectwalk.Walk(v, w)
	if err != nil {
		return nil, err
	}

	// Get the result. If the result is nil, then we want to turn it
	// into a typed nil if we can.
	result := w.Result
	if result == nil {
		val := reflect.ValueOf(v)
		result = reflect.Indirect(reflect.New(val.Type())).Interface()
	}

	return result, nil
}
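
// A sketch of a locked copy via Config, assuming a hypothetical sharedState
// pointer whose type is guarded by an embedded sync.Mutex; with Lock set, the
// argument must be a pointer, and non-mutex sync.Locker values are held while
// walking:
//
//	dup, err := copystructure.Config{Lock: true}.Copy(sharedState)
//	if err != nil {
//		// handle the copy error
//	}
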
// Return the key used to index interface types we've seen. Store the number
// of pointers in the upper 32 bits, and the depth in the lower 32 bits. This is
// easy to calculate, easy to match a key with our current depth, and we don't
// need to deal with initializing and cleaning up nested maps or slices.
func ifaceKey(pointers, depth int) uint64 {
	return uint64(pointers)<<32 | uint64(depth)
}
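
// For illustration: ifaceKey(2, 3) is 0x0000000200000003, i.e. two pointers
// seen at walk depth three.
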
type walker struct {
	Result interface{}

	copiers        map[reflect.Type]CopierFunc
	shallowCopiers map[reflect.Type]struct{}
	depth          int
	ignoreDepth    int
	vals           []reflect.Value
	cs             []reflect.Value

	// This stores the number of pointers we've walked over, indexed by depth.
	ps []int

	// If an interface is indirected by a pointer, we need to know the type of
	// interface to create when creating the new value. Store the interface
	// types here, indexed by both the walk depth and the number of pointers
	// already seen at that depth. Use ifaceKey to calculate the proper uint64
	// value.
	ifaceTypes map[uint64]reflect.Type

	// any locks we've taken, indexed by depth
	locks []sync.Locker
	// take locks while walking the structure
	useLocks bool
}
func (w *walker) Enter(l reflectwalk.Location) error {
	w.depth++

	// ensure we have enough elements to index via w.depth
	for w.depth >= len(w.locks) {
		w.locks = append(w.locks, nil)
	}

	for len(w.ps) < w.depth+1 {
		w.ps = append(w.ps, 0)
	}

	return nil
}
func (w *walker) Exit(l reflectwalk.Location) error {
	locker := w.locks[w.depth]
	w.locks[w.depth] = nil
	if locker != nil {
		defer locker.Unlock()
	}

	// clear out pointers and interfaces as we exit the stack
	w.ps[w.depth] = 0

	for k := range w.ifaceTypes {
		mask := uint64(^uint32(0))
		if k&mask == uint64(w.depth) {
			delete(w.ifaceTypes, k)
		}
	}

	w.depth--
	if w.ignoreDepth > w.depth {
		w.ignoreDepth = 0
	}

	if w.ignoring() {
		return nil
	}

	switch l {
	case reflectwalk.Array:
		fallthrough
	case reflectwalk.Map:
		fallthrough
	case reflectwalk.Slice:
		w.replacePointerMaybe()

		// Pop map off our container
		w.cs = w.cs[:len(w.cs)-1]
	case reflectwalk.MapValue:
		// Pop off the key and value
		mv := w.valPop()
		mk := w.valPop()
		m := w.cs[len(w.cs)-1]

		// If mv is the zero value, SetMapIndex deletes the key from the map,
		// or in this case never adds it. We need to create a properly typed
		// zero value so that this key can be set.
		if !mv.IsValid() {
			mv = reflect.Zero(m.Elem().Type().Elem())
		}
		m.Elem().SetMapIndex(mk, mv)
	case reflectwalk.ArrayElem:
		// Pop off the value and the index and set it on the array
		v := w.valPop()
		i := w.valPop().Interface().(int)
		if v.IsValid() {
			a := w.cs[len(w.cs)-1]
			ae := a.Elem().Index(i) // storing array as pointer on stack - so need Elem() call
			if ae.CanSet() {
				ae.Set(v)
			}
		}
	case reflectwalk.SliceElem:
		// Pop off the value and the index and set it on the slice
		v := w.valPop()
		i := w.valPop().Interface().(int)
		if v.IsValid() {
			s := w.cs[len(w.cs)-1]
			se := s.Elem().Index(i)
			if se.CanSet() {
				se.Set(v)
			}
		}
	case reflectwalk.Struct:
		w.replacePointerMaybe()

		// Remove the struct from the container stack
		w.cs = w.cs[:len(w.cs)-1]
	case reflectwalk.StructField:
		// Pop off the value and the field
		v := w.valPop()
		f := w.valPop().Interface().(reflect.StructField)
		if v.IsValid() {
			s := w.cs[len(w.cs)-1]
			sf := reflect.Indirect(s).FieldByName(f.Name)
			if sf.CanSet() {
				sf.Set(v)
			}
		}
	case reflectwalk.WalkLoc:
		// Clear out the slices for GC
		w.cs = nil
		w.vals = nil
	}

	return nil
}
func (w *walker) Map(m reflect.Value) error {
	if w.ignoring() {
		return nil
	}
	w.lock(m)

	// Create the map. If the map itself is nil, then just make a nil map
	var newMap reflect.Value
	if m.IsNil() {
		newMap = reflect.New(m.Type())
	} else {
		newMap = wrapPtr(reflect.MakeMap(m.Type()))
	}

	w.cs = append(w.cs, newMap)
	w.valPush(newMap)
	return nil
}
func (w *walker) MapElem(m, k, v reflect.Value) error {
	return nil
}
func (w *walker) PointerEnter(v bool) error {
	if v {
		w.ps[w.depth]++
	}

	return nil
}
func (w *walker) PointerExit(v bool) error {
	if v {
		w.ps[w.depth]--
	}

	return nil
}
func (w *walker) Pointer(v reflect.Value) error {
	if _, ok := w.shallowCopiers[v.Type()]; ok {
		// Shallow copy this value. Use the same logic as primitive, then
		// return skip.
		if err := w.Primitive(v); err != nil {
			return err
		}

		return reflectwalk.SkipEntry
	}

	return nil
}
func (w *walker) Interface(v reflect.Value) error {
	if !v.IsValid() {
		return nil
	}
	if w.ifaceTypes == nil {
		w.ifaceTypes = make(map[uint64]reflect.Type)
	}

	w.ifaceTypes[ifaceKey(w.ps[w.depth], w.depth)] = v.Type()
	return nil
}
func (w *walker) Primitive(v reflect.Value) error {
	if w.ignoring() {
		return nil
	}
	w.lock(v)

	// IsValid verifies that v is non-zero and CanInterface verifies
	// that we're allowed to read this value (unexported fields).
	var newV reflect.Value
	if v.IsValid() && v.CanInterface() {
		newV = reflect.New(v.Type())
		newV.Elem().Set(v)
	}

	w.valPush(newV)
	w.replacePointerMaybe()
	return nil
}
func (w *walker) Slice(s reflect.Value) error {
	if w.ignoring() {
		return nil
	}
	w.lock(s)

	var newS reflect.Value
	if s.IsNil() {
		newS = reflect.New(s.Type())
	} else {
		newS = wrapPtr(reflect.MakeSlice(s.Type(), s.Len(), s.Cap()))
	}

	w.cs = append(w.cs, newS)
	w.valPush(newS)
	return nil
}
func (w *walker) SliceElem(i int, elem reflect.Value) error {
	if w.ignoring() {
		return nil
	}

	// We don't write the slice here because elem might still be
	// arbitrarily complex. Just record the index and continue on.
	w.valPush(reflect.ValueOf(i))

	return nil
}
func (w *walker) Array(a reflect.Value) error {
	if w.ignoring() {
		return nil
	}
	w.lock(a)

	newA := reflect.New(a.Type())

	w.cs = append(w.cs, newA)
	w.valPush(newA)
	return nil
}
func (w *walker) ArrayElem(i int, elem reflect.Value) error {
	if w.ignoring() {
		return nil
	}

	// We don't write the array here because elem might still be
	// arbitrarily complex. Just record the index and continue on.
	w.valPush(reflect.ValueOf(i))

	return nil
}
func (w *walker) Struct(s reflect.Value) error {
	if w.ignoring() {
		return nil
	}
	w.lock(s)

	var v reflect.Value
	if c, ok := w.copiers[s.Type()]; ok {
		// We have a Copier for this struct, so we use that copier to
		// get the copy, and we ignore anything deeper than this.
		w.ignoreDepth = w.depth

		dup, err := c(s.Interface())
		if err != nil {
			return err
		}

		// We need to put a pointer to the value on the value stack,
		// so allocate a new pointer and set it.
		v = reflect.New(s.Type())
		reflect.Indirect(v).Set(reflect.ValueOf(dup))
	} else {
		// No copier, we copy ourselves and allow reflectwalk to guide
		// us deeper into the structure for copying.
		v = reflect.New(s.Type())
	}

	// Push the value onto the value stack for setting the struct field,
	// and add the struct itself to the containers stack in case we walk
	// deeper so that its own fields can be modified.
	w.valPush(v)
	w.cs = append(w.cs, v)

	return nil
}
func (w *walker) StructField(f reflect.StructField, v reflect.Value) error {
	if w.ignoring() {
		return nil
	}

	// If PkgPath is non-empty, this is a private (unexported) field.
	// We do not set this unexported field since the Go runtime doesn't allow us.
	if f.PkgPath != "" {
		return reflectwalk.SkipEntry
	}

	switch f.Tag.Get(tagKey) {
	case "shallow":
		// If we're shallow copying then assign the value directly to the
		// struct and skip the entry.
		if v.IsValid() {
			s := w.cs[len(w.cs)-1]
			sf := reflect.Indirect(s).FieldByName(f.Name)
			if sf.CanSet() {
				sf.Set(v)
			}
		}

		return reflectwalk.SkipEntry
	case "ignore":
		// Do nothing
		return reflectwalk.SkipEntry
	}

	// Push the field onto the stack, we'll handle it when we exit
	// the struct field in Exit...
	w.valPush(reflect.ValueOf(f))

	return nil
}
// ignore causes the walker to ignore any more values until we exit this one.
func (w *walker) ignore() {
	w.ignoreDepth = w.depth
}
func (w *walker) ignoring() bool {
	return w.ignoreDepth > 0 && w.depth >= w.ignoreDepth
}
func (w *walker) pointerPeek() bool {
	return w.ps[w.depth] > 0
}
func (w *walker) valPop() reflect.Value {
	result := w.vals[len(w.vals)-1]
	w.vals = w.vals[:len(w.vals)-1]

	// If we're out of values, that means we popped everything off. In
	// this case, we reset the result so the next pushed value becomes
	// the result.
	if len(w.vals) == 0 {
		w.Result = nil
	}

	return result
}
func (w *walker) valPush(v reflect.Value) {
	w.vals = append(w.vals, v)

	// If we haven't set the result yet, then this is the result since
	// it is the first (outermost) value we're seeing.
	if w.Result == nil && v.IsValid() {
		w.Result = v.Interface()
	}
}
func (w *walker) replacePointerMaybe() {
	// Determine the last pointer value. If it is NOT a pointer, then
	// we need to push that onto the stack.
	if !w.pointerPeek() {
		w.valPush(reflect.Indirect(w.valPop()))
		return
	}

	v := w.valPop()

	// If the expected type is a pointer to an interface of any depth,
	// such as *interface{}, **interface{}, etc., then we need to convert
	// the value "v" from *CONCRETE to *interface{} so types match for
	// Set.
	//
	// For example, if v is of type *Foo where Foo is a struct, v would become
	// *interface{} instead. This only happens if we have an interface expectation
	// at this depth.
	//
	// For more info, see GH-16
	if iType, ok := w.ifaceTypes[ifaceKey(w.ps[w.depth], w.depth)]; ok && iType.Kind() == reflect.Interface {
		y := reflect.New(iType)           // Create *interface{}
		y.Elem().Set(reflect.Indirect(v)) // Assign "Foo" to interface{} (dereferenced)
		v = y                             // v is now typed *interface{} (where *v = Foo)
	}

	for i := 1; i < w.ps[w.depth]; i++ {
		if iType, ok := w.ifaceTypes[ifaceKey(w.ps[w.depth]-i, w.depth)]; ok {
			iface := reflect.New(iType).Elem()
			iface.Set(v)
			v = iface
		}

		p := reflect.New(v.Type())
		p.Elem().Set(v)
		v = p
	}

	w.valPush(v)
}
// if this value is a Locker, lock it and add it to the locks slice
func (w *walker) lock(v reflect.Value) {
	if !w.useLocks {
		return
	}

	if !v.IsValid() || !v.CanInterface() {
		return
	}

	type rlocker interface {
		RLocker() sync.Locker
	}

	var locker sync.Locker

	// We can't call Interface() on a value directly, since that requires
	// a copy. This is OK, since the pointer to a value which is a sync.Locker
	// is also a sync.Locker.
	if v.Kind() == reflect.Ptr {
		switch l := v.Interface().(type) {
		case rlocker:
			// don't lock a mutex directly
			if _, ok := l.(*sync.RWMutex); !ok {
				locker = l.RLocker()
			}
		case sync.Locker:
			locker = l
		}
	} else if v.CanAddr() {
		switch l := v.Addr().Interface().(type) {
		case rlocker:
			// don't lock a mutex directly
			if _, ok := l.(*sync.RWMutex); !ok {
				locker = l.RLocker()
			}
		case sync.Locker:
			locker = l
		}
	}

	// still no callable locker
	if locker == nil {
		return
	}

	// don't lock a mutex directly
	switch locker.(type) {
	case *sync.Mutex, *sync.RWMutex:
		return
	}

	locker.Lock()
	w.locks[w.depth] = locker
}
// wrapPtr is a helper that takes v and always makes it *v. copystructure
// stores things internally as pointers until the last moment before unwrapping.
func wrapPtr(v reflect.Value) reflect.Value {
	if !v.IsValid() {
		return v
	}
	vPtr := reflect.New(v.Type())
	vPtr.Elem().Set(v)
	return vPtr
}