util/deephash: always keep values addressable (#5328)

The logic of deephash is both simpler and easier to reason about
if values are always addressable.

In Go, the composite kinds are slices, arrays, maps, structs,
interfaces, pointers, channels, and functions,
where a "composite" kind is one whose values encapsulate
other Go values (e.g., a map is a collection of key-value entries).

In the cases of pointers and slices, the sub-values are always addressable.

In the cases of arrays and structs, the sub-values are addressable
if and only if the parent value is addressable.
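
To illustrate these rules with the standard reflect API (a sketch; the
values are arbitrary):

	package main

	import (
		"fmt"
		"reflect"
	)

	func main() {
		s := reflect.ValueOf([]int{1, 2, 3})
		fmt.Println(s.Index(0).CanAddr()) // true: slice elements are always addressable

		a := reflect.ValueOf([3]int{1, 2, 3})
		fmt.Println(a.Index(0).CanAddr()) // false: the parent array value is not addressable

		p := reflect.ValueOf(&[3]int{1, 2, 3})
		fmt.Println(p.Elem().Index(0).CanAddr()) // true: array reached through a pointer dereference
	}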

In the cases of maps and interfaces, the sub-values are never addressable.
To make them addressable, we need to copy them onto the heap.
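
For example, a minimal sketch of that copy (variable names are
illustrative, not part of this change):

	package main

	import (
		"fmt"
		"reflect"
	)

	func main() {
		m := map[string]int{"a": 1}
		mv := reflect.ValueOf(m).MapIndex(reflect.ValueOf("a"))
		fmt.Println(mv.CanAddr()) // false: map sub-values are never addressable

		// Shallow copy onto the heap to obtain an addressable equivalent.
		av := reflect.New(mv.Type()).Elem()
		av.Set(mv)
		fmt.Println(av.CanAddr()) // true
	}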

For the purposes of deephash, we do not care about channels and functions.

Non-composite kinds (e.g., strings and ints) are addressable only
if obtained from one of the composite kinds that produce addressable values
(i.e., pointers, slices, addressable arrays, and addressable structs).
A non-addressable value of a non-composite kind can be made addressable by
allocating it on the heap, obtaining a pointer to it, and dereferencing it.
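
A sketch of that heap-allocation trick, which is exactly what
newAddressableValue below relies on:

	package main

	import (
		"fmt"
		"reflect"
	)

	func main() {
		v := reflect.ValueOf("hello") // the interface box is stripped; v is not addressable
		fmt.Println(v.CanAddr()) // false

		// Allocate on the heap, obtain a pointer, and dereference it.
		p := reflect.New(v.Type())
		p.Elem().Set(v)
		fmt.Println(p.Elem().CanAddr()) // true
	}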

Thus, if we ensure that values are addressable at the entry points,
and shallow copy sub-values whenever we encounter an interface or map,
then all values are guaranteed to be addressable,
and the rest of the logic can assume that property.
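
A condensed sketch of that entry-point normalization (mirroring the logic
added to Hash in the diff below; makeAddressable is an illustrative helper,
not part of this change, and the guard against invalid values is omitted):

	package main

	import (
		"fmt"
		"reflect"
	)

	// addressableValue mirrors the wrapper type introduced below:
	// a reflect.Value that is guaranteed to be addressable.
	type addressableValue struct{ reflect.Value }

	// makeAddressable normalizes an input at the entry point:
	// dereference non-nil pointers directly; shallow copy
	// everything else onto the heap.
	func makeAddressable(v any) addressableValue {
		rv := reflect.ValueOf(v)
		if rv.Kind() == reflect.Pointer && !rv.IsNil() {
			return addressableValue{rv.Elem()} // dereferenced pointer is always addressable
		}
		va := addressableValue{reflect.New(rv.Type()).Elem()}
		va.Set(rv)
		return va
	}

	func main() {
		fmt.Println(makeAddressable(42).CanAddr())          // true
		fmt.Println(makeAddressable(new(string)).CanAddr()) // true
	}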

Performance:

	name                 old time/op    new time/op    delta
	Hash-24                21.5µs ± 1%    19.7µs ± 1%  -8.29%  (p=0.000 n=9+9)
	HashPacketFilter-24    2.61µs ± 1%    2.62µs ± 0%  +0.29%  (p=0.037 n=10+9)
	HashMapAcyclic-24      30.8µs ± 1%    30.9µs ± 1%    ~     (p=0.400 n=9+10)
	TailcfgNode-24         1.84µs ± 1%    1.84µs ± 2%    ~     (p=0.928 n=10+10)
	HashArray-24            324ns ± 2%     332ns ± 2%  +2.45%  (p=0.000 n=10+10)

Signed-off-by: Joe Tsai <joetsai@digital-static.net>

@@ -61,6 +61,19 @@ import (
 // theoretically "parsable" by looking up the hash in a magical map that
 // returns the set of entries for that given hash.
 
+// addressableValue is a reflect.Value that is guaranteed to be addressable
+// such that calling the Addr and Set methods do not panic.
+//
+// There is no compile magic that enforces this property,
+// but rather the need to construct this type makes it easier to examine each
+// construction site to ensure that this property is upheld.
+type addressableValue struct{ reflect.Value }
+
+// newAddressableValue constructs a new addressable value of type t.
+func newAddressableValue(t reflect.Type) addressableValue {
+	return addressableValue{reflect.New(t).Elem()} // dereferenced pointer is always addressable
+}
+
 const scratchSize = 128
 
 // hasher is reusable state for hashing a value.
@@ -122,6 +135,7 @@ var hasherPool = &sync.Pool{
 }
 
 // Hash returns the hash of v.
+// For performance, this should be a non-nil pointer.
 func Hash(v any) (s Sum) {
 	h := hasherPool.Get().(*hasher)
 	defer hasherPool.Put(h)
@@ -131,14 +145,22 @@ func Hash(v any) (s Sum) {
 	rv := reflect.ValueOf(v)
 
 	if rv.IsValid() {
+		var va addressableValue
+		if rv.Kind() == reflect.Pointer && !rv.IsNil() {
+			va = addressableValue{rv.Elem()} // dereferenced pointer is always addressable
+		} else {
+			va = newAddressableValue(rv.Type())
+			va.Set(rv)
+		}
+
 		// Always treat the Hash input as an interface (it is), including hashing
 		// its type, otherwise two Hash calls of different types could hash to the
 		// same bytes off the different types and get equivalent Sum values. This is
 		// the same thing that we do for reflect.Kind Interface in hashValue, but
 		// the initial reflect.ValueOf from an interface value effectively strips
 		// the interface box off so we have to do it at the top level by hand.
-		h.hashType(rv.Type())
-		h.hashValue(rv, false)
+		h.hashType(va.Type())
+		h.hashValue(va, false)
 	}
 	return h.sum()
 }
@@ -147,7 +169,12 @@ func Hash(v any) (s Sum) {
 // the provided reflect type, avoiding a map lookup per value.
 func HasherForType[T any]() func(T) Sum {
 	var zeroT T
-	ti := getTypeInfo(reflect.TypeOf(zeroT))
+	t := reflect.TypeOf(zeroT)
+	ti := getTypeInfo(t)
+	var tiElem *typeInfo
+	if t.Kind() == reflect.Pointer {
+		tiElem = getTypeInfo(t.Elem())
+	}
 	seedOnce.Do(initSeed)
 
 	return func(v T) Sum {
@@ -159,14 +186,16 @@ func HasherForType[T any]() func(T) Sum {
 		rv := reflect.ValueOf(v)
 
 		if rv.IsValid() {
-			// Always treat the Hash input as an interface (it is), including hashing
-			// its type, otherwise two Hash calls of different types could hash to the
-			// same bytes off the different types and get equivalent Sum values. This is
-			// the same thing that we do for reflect.Kind Interface in hashValue, but
-			// the initial reflect.ValueOf from an interface value effectively strips
-			// the interface box off so we have to do it at the top level by hand.
-			h.hashType(rv.Type())
-			h.hashValueWithType(rv, ti, false)
+			if rv.Kind() == reflect.Pointer && !rv.IsNil() {
+				va := addressableValue{rv.Elem()} // dereferenced pointer is always addressable
+				h.hashType(va.Type())
+				h.hashValueWithType(va, tiElem, false)
+			} else {
+				va := newAddressableValue(rv.Type())
+				va.Set(rv)
+				h.hashType(va.Type())
+				h.hashValueWithType(va, ti, false)
+			}
 		}
 		return h.sum()
 	}
@@ -238,7 +267,7 @@ type typeInfo struct {
 }
 
 // returns ok if it was handled; else slow path runs
-type typeHasherFunc func(h *hasher, v reflect.Value) (ok bool)
+type typeHasherFunc func(h *hasher, v addressableValue) (ok bool)
 
 var typeInfoMap sync.Map           // map[reflect.Type]*typeInfo
 var typeInfoMapPopulate sync.Mutex // just for adding to typeInfoMap
@@ -252,7 +281,7 @@ func (ti *typeInfo) buildHashFuncOnce() {
 	ti.hashFuncLazy = genTypeHasher(ti.rtype)
 }
 
-func (h *hasher) hashBoolv(v reflect.Value) bool {
+func (h *hasher) hashBoolv(v addressableValue) bool {
 	var b byte
 	if v.Bool() {
 		b = 1
@@ -261,56 +290,51 @@ func (h *hasher) hashBoolv(v reflect.Value) bool {
 	return true
 }
 
-func (h *hasher) hashUint8v(v reflect.Value) bool {
+func (h *hasher) hashUint8v(v addressableValue) bool {
 	h.hashUint8(uint8(v.Uint()))
 	return true
 }
 
-func (h *hasher) hashInt8v(v reflect.Value) bool {
+func (h *hasher) hashInt8v(v addressableValue) bool {
 	h.hashUint8(uint8(v.Int()))
 	return true
 }
 
-func (h *hasher) hashUint16v(v reflect.Value) bool {
+func (h *hasher) hashUint16v(v addressableValue) bool {
 	h.hashUint16(uint16(v.Uint()))
 	return true
 }
 
-func (h *hasher) hashInt16v(v reflect.Value) bool {
+func (h *hasher) hashInt16v(v addressableValue) bool {
 	h.hashUint16(uint16(v.Int()))
 	return true
 }
 
-func (h *hasher) hashUint32v(v reflect.Value) bool {
+func (h *hasher) hashUint32v(v addressableValue) bool {
 	h.hashUint32(uint32(v.Uint()))
 	return true
 }
 
-func (h *hasher) hashInt32v(v reflect.Value) bool {
+func (h *hasher) hashInt32v(v addressableValue) bool {
 	h.hashUint32(uint32(v.Int()))
 	return true
 }
 
-func (h *hasher) hashUint64v(v reflect.Value) bool {
+func (h *hasher) hashUint64v(v addressableValue) bool {
 	h.hashUint64(v.Uint())
 	return true
 }
 
-func (h *hasher) hashInt64v(v reflect.Value) bool {
+func (h *hasher) hashInt64v(v addressableValue) bool {
 	h.hashUint64(uint64(v.Int()))
 	return true
 }
 
-func hashStructAppenderTo(h *hasher, v reflect.Value) bool {
+func hashStructAppenderTo(h *hasher, v addressableValue) bool {
 	if !v.CanInterface() {
 		return false // slow path
 	}
-	var a appenderTo
-	if v.CanAddr() {
-		a = v.Addr().Interface().(appenderTo)
-	} else {
-		a = v.Interface().(appenderTo)
-	}
+	a := v.Addr().Interface().(appenderTo)
 	size := h.scratch[:8]
 	record := a.AppendTo(size)
 	binary.LittleEndian.PutUint64(record, uint64(len(record)-len(size)))
@@ -319,7 +343,7 @@ func hashStructAppenderTo(h *hasher, v reflect.Value) bool {
 }
 
 // hashPointerAppenderTo hashes v, a reflect.Ptr, that implements appenderTo.
-func hashPointerAppenderTo(h *hasher, v reflect.Value) bool {
+func hashPointerAppenderTo(h *hasher, v addressableValue) bool {
 	if !v.CanInterface() {
 		return false // slow path
 	}
@@ -338,7 +362,7 @@ func hashPointerAppenderTo(h *hasher, v reflect.Value) bool {
 
 // fieldInfo describes a struct field.
 type fieldInfo struct {
-	index      int // index of field for reflect.Value.Field(n)
+	index      int // index of field for reflect.Value.Field(n); -1 if invalid
 	typeInfo   *typeInfo
 	canMemHash bool
 	offset     uintptr // when we can memhash the field
@@ -380,32 +404,26 @@ func genHashStructFields(t reflect.Type) typeHasherFunc {
 			size:       sf.Type.Size(),
 		})
 	}
 
-	fieldsIfCanAddr := mergeContiguousFieldsCopy(fields)
-	return structHasher{fields, fieldsIfCanAddr}.hash
+	fields = mergeContiguousFieldsCopy(fields)
+	return structHasher{fields}.hash
 }
 
 type structHasher struct {
-	fields, fieldsIfCanAddr []fieldInfo
+	fields []fieldInfo
 }
 
-func (sh structHasher) hash(h *hasher, v reflect.Value) bool {
-	var base unsafe.Pointer
-	if v.CanAddr() {
-		base = v.Addr().UnsafePointer()
-		for _, f := range sh.fieldsIfCanAddr {
-			if f.canMemHash {
-				h.bw.Write(unsafe.Slice((*byte)(unsafe.Pointer(uintptr(base)+f.offset)), f.size))
-			} else if !f.typeInfo.hasher()(h, v.Field(f.index)) {
-				return false
-			}
-		}
-	} else {
-		for _, f := range sh.fields {
-			if !f.typeInfo.hasher()(h, v.Field(f.index)) {
-				return false
-			}
-		}
-	}
+func (sh structHasher) hash(h *hasher, v addressableValue) bool {
+	base := v.Addr().UnsafePointer()
+	for _, f := range sh.fields {
+		if f.canMemHash {
+			h.bw.Write(unsafe.Slice((*byte)(unsafe.Pointer(uintptr(base)+f.offset)), f.size))
+			continue
+		}
+		va := addressableValue{v.Field(f.index)} // field is addressable if parent struct is addressable
+		if !f.typeInfo.hasher()(h, va) {
+			return false
+		}
+	}
 	return true
 }
@@ -413,7 +431,7 @@ func (sh structHasher) hash(h *hasher, v reflect.Value) bool {
 // the provided eleType.
 func genHashPtrToMemoryRange(eleType reflect.Type) typeHasherFunc {
 	size := eleType.Size()
-	return func(h *hasher, v reflect.Value) bool {
+	return func(h *hasher, v addressableValue) bool {
 		if v.IsNil() {
 			h.hashUint8(0) // indicates nil
 		} else {
@@ -489,18 +507,19 @@ func genTypeHasher(t reflect.Type) typeHasherFunc {
 		}
 		if !typeIsRecursive(t) {
 			eti := getTypeInfo(et)
-			return func(h *hasher, v reflect.Value) bool {
+			return func(h *hasher, v addressableValue) bool {
 				if v.IsNil() {
 					h.hashUint8(0) // indicates nil
 					return true
 				}
 				h.hashUint8(1) // indicates visiting a pointer
-				return eti.hasher()(h, v.Elem())
+				va := addressableValue{v.Elem()} // dereferenced pointer is always addressable
+				return eti.hasher()(h, va)
 			}
 		}
 	}
 
-	return func(h *hasher, v reflect.Value) bool {
+	return func(h *hasher, v addressableValue) bool {
 		if debug {
 			log.Printf("unhandled type %v", v.Type())
 		}
@@ -509,31 +528,31 @@ func genTypeHasher(t reflect.Type) typeHasherFunc {
 }
 
 // hashString hashes v, of kind String.
-func (h *hasher) hashString(v reflect.Value) bool {
+func (h *hasher) hashString(v addressableValue) bool {
 	s := v.String()
 	h.hashLen(len(s))
 	h.bw.WriteString(s)
 	return true
 }
 
-func (h *hasher) hashFloat32v(v reflect.Value) bool {
+func (h *hasher) hashFloat32v(v addressableValue) bool {
 	h.hashUint32(math.Float32bits(float32(v.Float())))
 	return true
 }
 
-func (h *hasher) hashFloat64v(v reflect.Value) bool {
+func (h *hasher) hashFloat64v(v addressableValue) bool {
 	h.hashUint64(math.Float64bits(v.Float()))
 	return true
 }
 
-func (h *hasher) hashComplex64v(v reflect.Value) bool {
+func (h *hasher) hashComplex64v(v addressableValue) bool {
 	c := complex64(v.Complex())
 	h.hashUint32(math.Float32bits(real(c)))
 	h.hashUint32(math.Float32bits(imag(c)))
 	return true
 }
 
-func (h *hasher) hashComplex128v(v reflect.Value) bool {
+func (h *hasher) hashComplex128v(v addressableValue) bool {
 	c := v.Complex()
 	h.hashUint64(math.Float64bits(real(c)))
 	h.hashUint64(math.Float64bits(imag(c)))
@@ -541,15 +560,8 @@ func (h *hasher) hashComplex128v(v reflect.Value) bool {
 }
 
 // hashString hashes v, of kind time.Time.
-func (h *hasher) hashTimev(v reflect.Value) bool {
-	var t time.Time
-	if v.CanAddr() {
-		t = *(*time.Time)(v.Addr().UnsafePointer())
-	} else if v.CanInterface() {
-		t = v.Interface().(time.Time)
-	} else {
-		return false
-	}
+func (h *hasher) hashTimev(v addressableValue) bool {
+	t := *(*time.Time)(v.Addr().UnsafePointer())
 	b := t.AppendFormat(h.scratch[:1], time.RFC3339Nano)
 	b[0] = byte(len(b) - 1) // more than sufficient width; if not, good enough.
 	h.bw.Write(b)
@@ -557,7 +569,7 @@ func (h *hasher) hashTimev(v reflect.Value) bool {
 }
 
 // hashSliceMem hashes v, of kind Slice, with a memhash-able element type.
-func (h *hasher) hashSliceMem(v reflect.Value) bool {
+func (h *hasher) hashSliceMem(v addressableValue) bool {
 	vLen := v.Len()
 	h.hashUint64(uint64(vLen))
 	if vLen == 0 {
@@ -568,20 +580,17 @@ func (h *hasher) hashSliceMem(v reflect.Value) bool {
 }
 
 func genHashArrayMem(n int, arraySize uintptr, efu *typeInfo) typeHasherFunc {
-	byElement := genHashArrayElements(n, efu)
-	return func(h *hasher, v reflect.Value) bool {
-		if v.CanAddr() {
-			h.bw.Write(unsafe.Slice((*byte)(v.Addr().UnsafePointer()), arraySize))
-			return true
-		}
-		return byElement(h, v)
-	}
+	return func(h *hasher, v addressableValue) bool {
+		h.bw.Write(unsafe.Slice((*byte)(v.Addr().UnsafePointer()), arraySize))
+		return true
+	}
 }
 
 func genHashArrayElements(n int, eti *typeInfo) typeHasherFunc {
-	return func(h *hasher, v reflect.Value) bool {
+	return func(h *hasher, v addressableValue) bool {
 		for i := 0; i < n; i++ {
-			if !eti.hasher()(h, v.Index(i)) {
+			va := addressableValue{v.Index(i)} // element is addressable if parent array is addressable
+			if !eti.hasher()(h, va) {
 				return false
 			}
 		}
@@ -589,7 +598,7 @@ func genHashArrayElements(n int, eti *typeInfo) typeHasherFunc {
 	}
 }
 
-func noopHasherFunc(h *hasher, v reflect.Value) bool { return true }
+func noopHasherFunc(h *hasher, v addressableValue) bool { return true }
 
 func genHashArray(t reflect.Type, eti *typeInfo) typeHasherFunc {
 	if t.Size() == 0 {
@@ -611,11 +620,12 @@ type sliceElementHasher struct {
 	eti *typeInfo
 }
 
-func (seh sliceElementHasher) hash(h *hasher, v reflect.Value) bool {
+func (seh sliceElementHasher) hash(h *hasher, v addressableValue) bool {
 	vLen := v.Len()
 	h.hashUint64(uint64(vLen))
 	for i := 0; i < vLen; i++ {
-		if !seh.eti.hasher()(h, v.Index(i)) {
+		va := addressableValue{v.Index(i)} // slice elements are always addressable
+		if !seh.eti.hasher()(h, va) {
 			return false
 		}
 	}
@@ -768,7 +778,7 @@ func canMemHash(t reflect.Type) bool {
 	return false
 }
 
-func (h *hasher) hashValue(v reflect.Value, forceCycleChecking bool) {
+func (h *hasher) hashValue(v addressableValue, forceCycleChecking bool) {
 	if !v.IsValid() {
 		return
 	}
@@ -776,7 +786,7 @@ func (h *hasher) hashValue(v reflect.Value, forceCycleChecking bool) {
 	h.hashValueWithType(v, ti, forceCycleChecking)
 }
 
-func (h *hasher) hashValueWithType(v reflect.Value, ti *typeInfo, forceCycleChecking bool) {
+func (h *hasher) hashValueWithType(v addressableValue, ti *typeInfo, forceCycleChecking bool) {
 	w := h.bw
 
 	doCheckCycles := forceCycleChecking || ti.isRecursive
@@ -809,10 +819,12 @@ func (h *hasher) hashValueWithType(v reflect.Value, ti *typeInfo, forceCycleChecking bool) {
 		}
 
 		h.hashUint8(1) // indicates visiting a pointer
-		h.hashValueWithType(v.Elem(), ti.elemTypeInfo, doCheckCycles)
+		va := addressableValue{v.Elem()} // dereferenced pointer is always addressable
+		h.hashValueWithType(va, ti.elemTypeInfo, doCheckCycles)
 	case reflect.Struct:
 		for i, n := 0, v.NumField(); i < n; i++ {
-			h.hashValue(v.Field(i), doCheckCycles)
+			va := addressableValue{v.Field(i)} // field is addressable if parent struct is addressable
+			h.hashValue(va, doCheckCycles)
 		}
 	case reflect.Slice, reflect.Array:
 		vLen := v.Len()
@@ -825,7 +837,7 @@ func (h *hasher) hashValueWithType(v reflect.Value, ti *typeInfo, forceCycleChecking bool) {
 			// It seems tempting to do this for all sizes, doing
 			// scratchSize bytes at a time, but reflect.Slice seems
 			// to allocate, so it's not a win.
-			n := reflect.Copy(reflect.ValueOf(&h.scratch).Elem(), v)
+			n := reflect.Copy(reflect.ValueOf(&h.scratch).Elem(), v.Value)
 			w.Write(h.scratch[:n])
 			return
 		}
@@ -836,18 +848,21 @@ func (h *hasher) hashValueWithType(v reflect.Value, ti *typeInfo, forceCycleChecking bool) {
 			// TODO(dsnet): Perform cycle detection for slices,
 			// which is functionally a list of pointers.
 			// See https://github.com/google/go-cmp/blob/402949e8139bb890c71a707b6faf6dd05c92f4e5/cmp/compare.go#L438-L450
-			h.hashValueWithType(v.Index(i), ti.elemTypeInfo, doCheckCycles)
+			va := addressableValue{v.Index(i)} // slice elements are always addressable
+			h.hashValueWithType(va, ti.elemTypeInfo, doCheckCycles)
 		}
 	case reflect.Interface:
 		if v.IsNil() {
 			h.hashUint8(0) // indicates nil
 			return
 		}
-		v = v.Elem()
+		// TODO: Use a valueCache here?
+		va := newAddressableValue(v.Elem().Type())
+		va.Set(v.Elem())
 
 		h.hashUint8(1) // indicates visiting interface value
-		h.hashType(v.Type())
-		h.hashValue(v, doCheckCycles)
+		h.hashType(va.Type())
+		h.hashValue(va, doCheckCycles)
 	case reflect.Map:
 		// Check for cycle.
 		if doCheckCycles {
@@ -911,12 +926,12 @@ var mapHasherPool = &sync.Pool{
 	New: func() any { return new(mapHasher) },
 }
 
-type valueCache map[reflect.Type]reflect.Value
+type valueCache map[reflect.Type]addressableValue
 
-func (c *valueCache) get(t reflect.Type) reflect.Value {
+func (c *valueCache) get(t reflect.Type) addressableValue {
 	v, ok := (*c)[t]
 	if !ok {
-		v = reflect.New(t).Elem()
+		v = newAddressableValue(t)
 		if *c == nil {
 			*c = make(valueCache)
 		}
@@ -929,12 +944,12 @@ func (c *valueCache) get(t reflect.Type) reflect.Value {
 // It relies on a map being a functionally an unordered set of KV entries.
 // So long as we hash each KV entry together, we can XOR all
 // of the individual hashes to produce a unique hash for the entire map.
-func (h *hasher) hashMap(v reflect.Value, ti *typeInfo, checkCycles bool) {
+func (h *hasher) hashMap(v addressableValue, ti *typeInfo, checkCycles bool) {
 	mh := mapHasherPool.Get().(*mapHasher)
 	defer mapHasherPool.Put(mh)
 
 	iter := &mh.iter
-	iter.Reset(v)
+	iter.Reset(v.Value)
 	defer iter.Reset(reflect.Value{}) // avoid pinning v from mh.iter when we return
 
 	var sum Sum
@@ -983,8 +998,8 @@ func (v visitStack) pop(p pointer) {
 // that would break if Go ever switched to a moving GC.
 type pointer struct{ p unsafe.Pointer }
 
-func pointerOf(v reflect.Value) pointer {
-	return pointer{unsafe.Pointer(v.Pointer())}
+func pointerOf(v addressableValue) pointer {
+	return pointer{unsafe.Pointer(v.Value.Pointer())}
 }
 
 // hashType hashes a reflect.Type.

@@ -187,8 +187,16 @@ func TestQuick(t *testing.T) {
 	}
 }
 
-func getVal() []any {
-	return []any{
+func getVal() any {
+	return &struct {
+		WGConfig         *wgcfg.Config
+		RouterConfig     *router.Config
+		MapFQDNAddrs     map[dnsname.FQDN][]netip.Addr
+		MapFQDNAddrPorts map[dnsname.FQDN][]netip.AddrPort
+		MapDiscoPublics  map[key.DiscoPublic]bool
+		MapResponse      *tailcfg.MapResponse
+		FilterMatch      filter.Match
+	}{
 		&wgcfg.Config{
 			Name:      "foo",
 			Addresses: []netip.Prefix{netip.PrefixFrom(netip.AddrFrom16([16]byte{3: 3}).Unmap(), 5)},
@@ -468,6 +476,7 @@ func TestGetTypeHasher(t *testing.T) {
 				c uint16
 			}{1, -1, 2},
 			out:   "\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x02\x00",
+			out32: "\x01\x00\x00\x00\xff\xff\xff\xff\x02\x00",
 		},
 		{
 			name: "nil_int_ptr",
@@ -529,7 +538,7 @@ func TestGetTypeHasher(t *testing.T) {
 		{
 			name: "time_ptr_via_unexported_value",
 			val:  *testtype.NewUnexportedAddressableTime(time.Unix(0, 0).In(time.UTC)),
-			want: false, // neither addressable nor interface-able
+			out:  "\x141970-01-01T00:00:00Z",
 		},
 		{
 			name: "time_custom_zone",
@@ -614,12 +623,14 @@ func TestGetTypeHasher(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			rv := reflect.ValueOf(tt.val)
-			fn := getTypeInfo(rv.Type()).hasher()
+			va := newAddressableValue(rv.Type())
+			va.Set(rv)
+			fn := getTypeInfo(va.Type()).hasher()
 			var buf bytes.Buffer
 			h := &hasher{
 				bw: bufio.NewWriter(&buf),
 			}
-			got := fn(h, rv)
+			got := fn(h, va)
 			const ptrSize = 32 << uintptr(^uintptr(0)>>63)
 			if tt.out32 != "" && ptrSize == 32 {
 				tt.out = tt.out32
@@ -640,7 +651,7 @@ func TestGetTypeHasher(t *testing.T) {
 	}
 }
 
-var sink = Hash("foo")
+var sink Sum
 
 func BenchmarkHash(b *testing.B) {
 	b.ReportAllocs()
@@ -696,9 +707,9 @@ var filterRules = []tailcfg.FilterRule{
 
 func BenchmarkHashPacketFilter(b *testing.B) {
 	b.ReportAllocs()
-	hash := HasherForType[[]tailcfg.FilterRule]()
+	hash := HasherForType[*[]tailcfg.FilterRule]()
 	for i := 0; i < b.N; i++ {
-		sink = hash(filterRules)
+		sink = hash(&filterRules)
 	}
 }
@@ -715,7 +726,7 @@ func TestHashMapAcyclic(t *testing.T) {
 	ti := getTypeInfo(reflect.TypeOf(m))
 
 	for i := 0; i < 20; i++ {
-		v := reflect.ValueOf(m)
+		v := addressableValue{reflect.ValueOf(&m).Elem()}
 		buf.Reset()
 		bw.Reset(&buf)
 		h := &hasher{bw: bw}
@@ -738,7 +749,7 @@ func TestPrintArray(t *testing.T) {
 	var got bytes.Buffer
 	bw := bufio.NewWriter(&got)
 	h := &hasher{bw: bw}
-	h.hashValue(reflect.ValueOf(x), false)
+	h.hashValue(addressableValue{reflect.ValueOf(&x).Elem()}, false)
 	bw.Flush()
 	const want = "\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f"
 	if got := got.Bytes(); string(got) != want {
@@ -755,7 +766,7 @@ func BenchmarkHashMapAcyclic(b *testing.B) {
 	var buf bytes.Buffer
 	bw := bufio.NewWriter(&buf)
 
-	v := reflect.ValueOf(m)
+	v := addressableValue{reflect.ValueOf(&m).Elem()}
 	ti := getTypeInfo(v.Type())
 
 	h := &hasher{bw: bw}
