internal/abi.MapType.Key (field)
63 uses
internal/abi (current package)
type.go#L469: Key *Type
type.go#L500: return (*MapType)(unsafe.Pointer(t)).Key
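
The two uses in the current package are the field declaration (type.go#L469) and the (*Type).Key accessor (type.go#L500). Below is a minimal, self-contained sketch of that accessor pattern; the Kind, Type, and MapType definitions here are mock stand-ins, not the real internal/abi layout, which carries many more fields.

package main

import (
	"fmt"
	"unsafe"
)

// Mock stand-ins for internal/abi.Type and internal/abi.MapType.
type Kind uint8

const (
	Invalid Kind = iota
	Map
)

type Type struct {
	Kind_ Kind
}

type MapType struct {
	Type        // the Type header must be the first field for the cast below to hold
	Key   *Type // descriptor of the map's key type (the field this page indexes)
	Elem  *Type // descriptor of the map's element type
}

// Key mirrors the accessor at type.go#L500: a *Type known to describe a map
// is reinterpreted as a *MapType to reach its Key field.
func (t *Type) Key() *Type {
	if t.Kind_ != Map {
		return nil
	}
	return (*MapType)(unsafe.Pointer(t)).Key
}

func main() {
	keyDesc := &Type{}
	mt := &MapType{Type: Type{Kind_: Map}, Key: keyDesc}
	fmt.Println((&mt.Type).Key() == keyDesc) // prints true
}
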
internal/concurrent
hashtriemap.go#L35: keyEqual: mapType.Key.Equal,
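
internal/concurrent captures the key type's equality function once, when the structure is built (hashtriemap.go#L35), rather than re-deriving it per operation. A self-contained analogy of that pattern follows; typeDesc, mapTypeDesc, and trieMapSketch are illustrative names, not the real internal/abi or internal/concurrent API.

package main

import (
	"fmt"
	"unsafe"
)

// typeDesc stands in for a type descriptor whose Equal compares two values
// of the described type through unsafe pointers.
type typeDesc struct {
	Equal func(unsafe.Pointer, unsafe.Pointer) bool
}

// mapTypeDesc stands in for a map type descriptor with key and element descriptors.
type mapTypeDesc struct {
	Key  *typeDesc
	Elem *typeDesc
}

// trieMapSketch keeps the key comparison function, mirroring the
// keyEqual: mapType.Key.Equal assignment listed above.
type trieMapSketch struct {
	keyEqual func(unsafe.Pointer, unsafe.Pointer) bool
}

func newTrieMapSketch(mt *mapTypeDesc) *trieMapSketch {
	return &trieMapSketch{keyEqual: mt.Key.Equal}
}

func main() {
	intEqual := func(a, b unsafe.Pointer) bool { return *(*int)(a) == *(*int)(b) }
	m := newTrieMapSketch(&mapTypeDesc{Key: &typeDesc{Equal: intEqual}})
	x, y := 7, 7
	fmt.Println(m.keyEqual(unsafe.Pointer(&x), unsafe.Pointer(&y))) // prints true
}
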
internal/reflectlite
type.go#L330: return toType(tt.Key)
reflect
type.go#L781: return toType(tt.Key)
type.go#L1828: if mt.Key == ktyp && mt.Elem == etyp {
type.go#L1842: mt.Key = ktyp
value.go#L1804: if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= abi.MapMaxElemBytes {
value.go#L1808: key = key.assignTo("reflect.Value.MapIndex", tt.Key, nil)
value.go#L1833: keyType := tt.Key
value.go#L1904: ktype := t.Key
value.go#L1928: ktype := t.Key
value.go#L2444: if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= abi.MapMaxElemBytes {
value.go#L2462: key = key.assignTo("reflect.Value.SetMapIndex", tt.Key, nil)
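
The reflect uses (mirrored by internal/reflectlite at type.go#L330) back the public Type.Key method (type.go#L781), the Key check and assignment in MapOf (type.go#L1828, #L1842), and the key conversions in the value.go map operations. A small runnable example of that public surface:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	// Type.Key and Type.Elem expose the key and element descriptors of a map type.
	t := reflect.TypeOf(map[string][]int{})
	fmt.Println(t.Key(), t.Elem()) // string []int

	// MapOf builds (or reuses) a map type whose Key and Elem match the
	// given types; compare the check at type.go#L1828.
	mt := reflect.MapOf(reflect.TypeOf(0), reflect.TypeOf(""))
	fmt.Println(mt) // map[int]string

	// Value.MapIndex checks and converts the key against the map's Key
	// type before the lookup (value.go#L1804, #L1808).
	v := reflect.ValueOf(map[string]int{"go": 1})
	fmt.Println(v.MapIndex(reflect.ValueOf("go"))) // 1
}
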
runtime
alg.go#L278: return mapKeyError2(t.Key, p)
map.go#L414: raceReadObjectPC(t.Key, key, callerpc, pc)
map.go#L417: msanread(key, t.Key.Size_)
map.go#L420: asanread(key, t.Key.Size_)
map.go#L458: if t.Key.Equal(key, k) {
map.go#L484: raceReadObjectPC(t.Key, key, callerpc, pc)
map.go#L487: msanread(key, t.Key.Size_)
map.go#L490: asanread(key, t.Key.Size_)
map.go#L528: if t.Key.Equal(key, k) {
map.go#L572: if t.Key.Equal(key, k) {
map.go#L623: raceReadObjectPC(t.Key, key, callerpc, pc)
map.go#L626: msanread(key, t.Key.Size_)
map.go#L629: asanread(key, t.Key.Size_)
map.go#L673: if !t.Key.Equal(key, k) {
map.go#L678: typedmemmove(t.Key, k, key)
map.go#L709: kmem := newobject(t.Key)
map.go#L717: typedmemmove(t.Key, insertk, key)
map.go#L746: raceReadObjectPC(t.Key, key, callerpc, pc)
map.go#L749: msanread(key, t.Key.Size_)
map.go#L752: asanread(key, t.Key.Size_)
map.go#L791: if !t.Key.Equal(key, k2) {
map.go#L797: } else if t.Key.Pointers() {
map.go#L798: memclrHasPointers(k, t.Key.Size_)
map.go#L1004: if t.ReflexiveKey() || t.Key.Equal(k, k) {
map.go#L1025: !(t.ReflexiveKey() || t.Key.Equal(k, k)) {
map.go#L1299: if h.flags&iterator != 0 && !t.ReflexiveKey() && !t.Key.Equal(k2, k2) {
map.go#L1337: typedmemmove(t.Key, dst.k, k) // copy elem
map.go#L1411: if t.Key.Equal == nil {
map.go#L1414: if t.Key.Size_ > abi.MapMaxKeyBytes && (!t.IndirectKey() || t.KeySize != uint8(goarch.PtrSize)) ||
map.go#L1415: t.Key.Size_ <= abi.MapMaxKeyBytes && (t.IndirectKey() || t.KeySize != uint8(t.Key.Size_)) {
map.go#L1422: if t.Key.Align_ > abi.MapBucketCount {
map.go#L1428: if t.Key.Size_%uintptr(t.Key.Align_) != 0 {
map.go#L1437: if dataOffset%uintptr(t.Key.Align_) != 0 {
map.go#L1650: kStore := newobject(t.Key)
map.go#L1651: typedmemmove(t.Key, kStore, srcK)
map.go#L1659: typedmemmove(t.Key, dstK, srcK)
map.go#L1841: typedmemmove(t.Key, add(s.array, uintptr(s.len)*uintptr(t.Key.Size())), k)
map_fast32.go#L334: if goarch.PtrSize == 4 && t.Key.Pointers() {
map_fast32.go#L460: if goarch.PtrSize == 4 && t.Key.Pointers() && writeBarrier.enabled {
map_fast64.go#L334: if t.Key.Pointers() {
map_fast64.go#L464: if t.Key.Pointers() && writeBarrier.enabled {
map_fast64.go#L471: typedmemmove(t.Key, dst.k, k)
type.go#L444: return typesEqual(mt.Key, mv.Key, seen) && typesEqual(mt.Elem, mv.Elem, seen)
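
In the runtime, t.Key drives sanitizer instrumentation of key memory, key comparison during bucket scans, typed copies and allocation of key storage, and the map-type sanity checks around map.go#L1411-#L1437. The sketch below shows only the comparison loop (compare map.go#L458 and #L528); the descriptor and bucket types are mock stand-ins, and hashing, tophash, indirect keys, and overflow buckets are all omitted.

package main

import (
	"fmt"
	"unsafe"
)

// Mock stand-ins for the runtime's descriptors; the real types also carry
// hash functions, sizes, alignment, and GC metadata.
type typeDesc struct {
	Equal func(unsafe.Pointer, unsafe.Pointer) bool
}

type mapTypeDesc struct {
	Key  *typeDesc
	Elem *typeDesc
}

const bucketCnt = 8 // the real count is abi.MapBucketCount

// bucketSketch keeps keys and elems in parallel slots, loosely echoing the
// real bucket layout (tophash and overflow pointers omitted).
type bucketSketch struct {
	keys  [bucketCnt]unsafe.Pointer
	elems [bucketCnt]unsafe.Pointer
	used  int
}

// lookupSketch mirrors the core of the access loops (map.go#L458, #L528):
// compare the probe key against each stored key with t.Key.Equal.
func lookupSketch(t *mapTypeDesc, b *bucketSketch, key unsafe.Pointer) (unsafe.Pointer, bool) {
	for i := 0; i < b.used; i++ {
		if t.Key.Equal(key, b.keys[i]) {
			return b.elems[i], true
		}
	}
	return nil, false
}

func main() {
	strEqual := func(a, b unsafe.Pointer) bool { return *(*string)(a) == *(*string)(b) }
	t := &mapTypeDesc{Key: &typeDesc{Equal: strEqual}}

	k, v := "go", 1
	b := &bucketSketch{used: 1}
	b.keys[0], b.elems[0] = unsafe.Pointer(&k), unsafe.Pointer(&v)

	probe := "go"
	if e, ok := lookupSketch(t, b, unsafe.Pointer(&probe)); ok {
		fmt.Println(*(*int)(e)) // prints 1
	}
}
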