internal/abi.Type.Size_ (field)
194 uses
internal/abi (current package)
type.go#L21: Size_ uintptr
type.go#L435: func (t *Type) Size() uintptr { return t.Size_ }
reflect
type.go#L591: return int(t.t.Size_) * 8
type.go#L1757: if typ.Size_ >= 1<<16 {
type.go#L1849: if ktyp.Size_ > abi.MapMaxKeyBytes {
type.go#L1853: mt.KeySize = uint8(ktyp.Size_)
type.go#L1855: if etyp.Size_ > abi.MapMaxElemBytes {
type.go#L1859: mt.MapType.ValueSize = uint8(etyp.Size_)
type.go#L1861: mt.MapType.BucketSize = uint16(mt.Bucket.Size_)
type.go#L2110: if ktyp.Size_ > abi.MapMaxKeyBytes {
type.go#L2113: if etyp.Size_ > abi.MapMaxElemBytes {
type.go#L2125: size := abi.MapBucketCount*(1+ktyp.Size_+etyp.Size_) + goarch.PtrSize
type.go#L2131: nptr := (abi.MapBucketCount*(1+ktyp.Size_+etyp.Size_) + goarch.PtrSize) / goarch.PtrSize
type.go#L2142: base += abi.MapBucketCount * ktyp.Size_ / goarch.PtrSize
type.go#L2147: base += abi.MapBucketCount * etyp.Size_ / goarch.PtrSize
type.go#L2162: Size_: size,
type.go#L2183: words := typ.Size_ / goarch.PtrSize
type.go#L2514: size = offset + ft.Size_
type.go#L2520: if ft.Size_ == 0 {
type.go#L2642: typ.Size_ = size
type.go#L2818: if typ.Size_ > 0 {
type.go#L2819: max := ^uintptr(0) / typ.Size_
type.go#L2824: array.Size_ = typ.Size_ * uintptr(length)
type.go#L2826: array.PtrBytes = typ.Size_*uintptr(length-1) + typ.PtrBytes
type.go#L2834: case !typ.Pointers() || array.Size_ == 0:
type.go#L2845: case typ.Kind_&abi.KindGCProg == 0 && array.Size_ <= abi.MaxPtrmaskBytes*8*goarch.PtrSize:
type.go#L2863: elemWords := typ.Size_ / goarch.PtrSize
type.go#L2884: array.PtrBytes = array.Size_ // overestimate but ok; must match program
type.go#L2993: Size_: align(abid.retOffset+abid.ret.stackBytes, goarch.PtrSize),
type.go#L3066: addTypeBits(bv, offset+uintptr(i)*tt.Elem.Size_, tt.Elem)
runtime
alg.go#L226: switch t.Size_ {
alg.go#L232: return memhash(p, h, t.Size_)
alg.go#L255: h = typehash(a.Elem, add(p, i*a.Elem.Size_), h)
alg.go#L320: if err := mapKeyError2(a.Elem, add(p, i*a.Elem.Size_)); err != nil {
arena.go#L450: size := typ.Size_
arena.go#L516: c.scanAlloc += size - (typ.Size_ - typ.PtrBytes)
arena.go#L540: mem, overflow := math.MulUintptr(typ.Size_, uintptr(n))
arena.go#L545: userArenaHeapBitsSetType(typ, add(ptr, uintptr(i)*typ.Size_), s)
arena.go#L585: h = h.pad(s, typ.Size_-typ.PtrBytes)
arena.go#L586: h.flush(s, uintptr(ptr), typ.Size_)
arena.go#L600: doubleCheckHeapPointersInterior(uintptr(ptr), uintptr(ptr), typ.Size_, typ.Size_, typ, &s.largeType, s)
arena.go#L803: span.largeType.Size_ = span.elemsize
arena.go#L1119: s.largeType.Size_ = s.elemsize
cgocall.go#L599: p = add(p, at.Elem.Size_)
cgocall.go#L648: p = add(p, st.Elem.Size_)
cgocheck.go#L81: cgoCheckMemmove2(typ, dst, src, 0, typ.Size_)
cgocheck.go#L125: cgoCheckTypedBlock(typ, p, 0, typ.Size_)
cgocheck.go#L126: p = add(p, typ.Size_)
cgocheck.go#L258: if off < at.Elem.Size_ {
cgocheck.go#L261: src = add(src, at.Elem.Size_)
cgocheck.go#L263: if skipped > at.Elem.Size_ {
cgocheck.go#L264: skipped = at.Elem.Size_
cgocheck.go#L266: checked := at.Elem.Size_ - skipped
cgocheck.go#L276: if off < f.Typ.Size_ {
cgocheck.go#L279: src = add(src, f.Typ.Size_)
cgocheck.go#L281: if skipped > f.Typ.Size_ {
cgocheck.go#L282: skipped = f.Typ.Size_
cgocheck.go#L284: checked := f.Typ.Size_ - skipped
chan.go#L77: if elem.Size_ >= 1<<16 {
chan.go#L84: mem, overflow := math.MulUintptr(elem.Size_, uintptr(size))
chan.go#L111: c.elemsize = uint16(elem.Size_)
chan.go#L117: print("makechan: chan=", c, "; elemsize=", elem.Size_, "; dataqsiz=", size, "\n")
chan.go#L382: typeBitsBulkBarrier(t, uintptr(dst), uintptr(src), t.Size_)
chan.go#L385: memmove(dst, src, t.Size_)
chan.go#L393: typeBitsBulkBarrier(t, uintptr(dst), uintptr(src), t.Size_)
chan.go#L394: memmove(dst, src, t.Size_)
checkptr.go#L25: if checkptrStraddles(p, n*elem.Size_) {
heapdump.go#L196: dumpint(uint64(t.Size_))
iface.go#L339: msanread(v, t.Size_)
iface.go#L342: asanread(v, t.Size_)
iface.go#L344: x := mallocgc(t.Size_, t, true)
iface.go#L354: msanread(v, t.Size_)
iface.go#L357: asanread(v, t.Size_)
iface.go#L360: x := mallocgc(t.Size_, t, false)
iface.go#L361: memmove(x, v, t.Size_)
malloc.go#L1386: return mallocgc(typ.Size_, typ, true)
malloc.go#L1402: return mallocgc(typ.Size_, typ, true)
malloc.go#L1407: return mallocgc(typ.Size_, typ, true)
malloc.go#L1425: return mallocgc(typ.Size_, typ, true)
malloc.go#L1427: mem, overflow := math.MulUintptr(typ.Size_, uintptr(n))
map.go#L319: mem, overflow := math.MulUintptr(uintptr(hint), t.Bucket.Size_)
map.go#L369: sz := t.Bucket.Size_ * nbuckets
map.go#L372: nbuckets = up / t.Bucket.Size_
map.go#L383: size := t.Bucket.Size_ * nbuckets
map.go#L417: msanread(key, t.Key.Size_)
map.go#L420: asanread(key, t.Key.Size_)
map.go#L487: msanread(key, t.Key.Size_)
map.go#L490: asanread(key, t.Key.Size_)
map.go#L626: msanread(key, t.Key.Size_)
map.go#L629: asanread(key, t.Key.Size_)
map.go#L749: msanread(key, t.Key.Size_)
map.go#L752: asanread(key, t.Key.Size_)
map.go#L798: memclrHasPointers(k, t.Key.Size_)
map.go#L804: memclrHasPointers(e, t.Elem.Size_)
map.go#L806: memclrNoHeapPointers(e, t.Elem.Size_)
map.go#L1414: if t.Key.Size_ > abi.MapMaxKeyBytes && (!t.IndirectKey() || t.KeySize != uint8(goarch.PtrSize)) ||
map.go#L1415: t.Key.Size_ <= abi.MapMaxKeyBytes && (t.IndirectKey() || t.KeySize != uint8(t.Key.Size_)) {
map.go#L1418: if t.Elem.Size_ > abi.MapMaxElemBytes && (!t.IndirectElem() || t.ValueSize != uint8(goarch.PtrSize)) ||
map.go#L1419: t.Elem.Size_ <= abi.MapMaxElemBytes && (t.IndirectElem() || t.ValueSize != uint8(t.Elem.Size_)) {
map.go#L1428: if t.Key.Size_%uintptr(t.Key.Align_) != 0 {
map.go#L1431: if t.Elem.Size_%uintptr(t.Elem.Align_) != 0 {
map_fast32.go#L341: memclrHasPointers(e, t.Elem.Size_)
map_fast32.go#L343: memclrNoHeapPointers(e, t.Elem.Size_)
map_fast64.go#L345: memclrHasPointers(e, t.Elem.Size_)
map_fast64.go#L347: memclrNoHeapPointers(e, t.Elem.Size_)
map_faststr.go#L360: memclrHasPointers(e, t.Elem.Size_)
map_faststr.go#L362: memclrNoHeapPointers(e, t.Elem.Size_)
mbarrier.go#L179: memmove(dst, src, typ.Size_)
mbarrier.go#L181: cgoCheckMemmove2(typ, dst, src, 0, typ.Size_)
mbarrier.go#L231: msanwrite(dst, typ.Size_)
mbarrier.go#L232: msanread(src, typ.Size_)
mbarrier.go#L235: asanwrite(dst, typ.Size_)
mbarrier.go#L236: asanread(src, typ.Size_)
mbarrier.go#L299: racewriterangepc(dstPtr, uintptr(n)*typ.Size_, callerpc, pc)
mbarrier.go#L300: racereadrangepc(srcPtr, uintptr(n)*typ.Size_, callerpc, pc)
mbarrier.go#L303: msanwrite(dstPtr, uintptr(n)*typ.Size_)
mbarrier.go#L304: msanread(srcPtr, uintptr(n)*typ.Size_)
mbarrier.go#L307: asanwrite(dstPtr, uintptr(n)*typ.Size_)
mbarrier.go#L308: asanread(srcPtr, uintptr(n)*typ.Size_)
mbarrier.go#L323: size := uintptr(n) * typ.Size_
mbarrier.go#L328: pwsize := size - typ.Size_ + typ.PtrBytes
mbarrier.go#L352: return slicecopy(dst.array, dst.len, src.array, src.len, elemType.Size_)
mbarrier.go#L375: memclrNoHeapPointers(ptr, typ.Size_)
mbarrier.go#L405: size := typ.Size_ * uintptr(len)
mbitmap.go#L286: tp.elem += tp.typ.Size_
mbitmap.go#L333: if n >= tp.typ.Size_ {
mbitmap.go#L337: tp.elem += (tp.addr - tp.elem + n) / tp.typ.Size_ * tp.typ.Size_
mbitmap.go#L346: tp.elem += tp.typ.Size_
mbitmap.go#L649: switch typ.Size_ {
mbitmap.go#L653: for i := typ.Size_; i < dataSize; i += typ.Size_ {
mbitmap.go#L655: scanSize += typ.Size_
mbitmap.go#L681: print("runtime: dataSize=", dataSize, " typ.Size_=", typ.Size_, " typ.PtrBytes=", typ.PtrBytes, "\n")
mbitmap.go#L737: gctyp.Size_ = typ.Size_
mbitmap.go#L773: size = (size + gctyp.Size_ - 1) / gctyp.Size_ * gctyp.Size_
mbitmap.go#L794: off := i % typ.Size_
mbitmap.go#L820: print("runtime: hasHeader=", header != nil, " typ.Size_=", typ.Size_, " hasGCProg=", typ.Kind_&abi.KindGCProg != 0, "\n")
mbitmap.go#L851: off := i % typ.Size_
mbitmap.go#L878: print("runtime: hasHeader=", header != nil, " typ.Size_=", typ.Size_, "\n")
mbitmap.go#L899: off := i % typ.Size_
mbitmap.go#L1382: if typ.Size_ != size {
mbitmap.go#L1383: println("runtime: typeBitsBulkBarrier with type ", toRType(typ).string(), " of size ", typ.Size_, " but memory size", size)
mbitmap.go#L1766: n := et.Size_
mbitmap.go#L1778: n := et.Size_
mbitmap.go#L1898: n := (*ptrtype)(unsafe.Pointer(t)).Elem.Size_
mfinal.go#L453: if ot.Elem == nil || ot.Elem.Pointers() || ot.Elem.Size_ >= maxTinySize {
mfinal.go#L504: nret = alignUp(nret, uintptr(t.Align_)) + t.Size_
select.go#L414: msanread(cas.elem, c.elemtype.Size_)
select.go#L416: msanwrite(cas.elem, c.elemtype.Size_)
select.go#L421: asanread(cas.elem, c.elemtype.Size_)
select.go#L423: asanwrite(cas.elem, c.elemtype.Size_)
select.go#L439: msanwrite(cas.elem, c.elemtype.Size_)
select.go#L442: asanwrite(cas.elem, c.elemtype.Size_)
select.go#L465: msanread(cas.elem, c.elemtype.Size_)
select.go#L468: asanread(cas.elem, c.elemtype.Size_)
select.go#L506: msanread(cas.elem, c.elemtype.Size_)
select.go#L509: asanread(cas.elem, c.elemtype.Size_)
slice.go#L42: tomem, overflow = math.MulUintptr(et.Size_, uintptr(tolen))
slice.go#L46: copymem = et.Size_ * uintptr(fromlen)
slice.go#L51: tomem = et.Size_ * uintptr(tolen)
slice.go#L102: mem, overflow := math.MulUintptr(et.Size_, uintptr(cap))
slice.go#L109: mem, overflow := math.MulUintptr(et.Size_, uintptr(len))
slice.go#L181: racereadrangepc(oldPtr, uintptr(oldLen*int(et.Size_)), callerpc, abi.FuncPCABIInternal(growslice))
slice.go#L184: msanread(oldPtr, uintptr(oldLen*int(et.Size_)))
slice.go#L187: asanread(oldPtr, uintptr(oldLen*int(et.Size_)))
slice.go#L194: if et.Size_ == 0 {
slice.go#L210: case et.Size_ == 1:
slice.go#L216: case et.Size_ == goarch.PtrSize:
slice.go#L222: case isPowerOfTwo(et.Size_):
slice.go#L226: shift = uintptr(sys.TrailingZeros64(uint64(et.Size_))) & 63
slice.go#L228: shift = uintptr(sys.TrailingZeros32(uint32(et.Size_))) & 31
slice.go#L237: lenmem = uintptr(oldLen) * et.Size_
slice.go#L238: newlenmem = uintptr(newLen) * et.Size_
slice.go#L239: capmem, overflow = math.MulUintptr(et.Size_, uintptr(newcap))
slice.go#L241: newcap = int(capmem / et.Size_)
slice.go#L242: capmem = uintptr(newcap) * et.Size_
slice.go#L280: bulkBarrierPreWriteSrcOnly(uintptr(p), uintptr(oldPtr), lenmem-et.Size_+et.PtrBytes, et)
slice.go#L342: oldcapmem := uintptr(old.cap) * et.Size_
slice.go#L343: newlenmem := uintptr(new.len) * et.Size_
stkframe.go#L284: off: -int32(alignUp(abiRegArgsType.Size_, 8)), // It's always the highest address local.
stkframe.go#L285: size: int32(abiRegArgsType.Size_),
unsafe.go#L58: if et.Size_ == 0 {
unsafe.go#L64: mem, overflow := math.MulUintptr(et.Size_, uintptr(len))
unsafe.go#L87: if checkptrStraddles(ptr, uintptr(len64)*et.Size_) {
The pages are generated with Golds v0.7.6. (GOOS=linux GOARCH=amd64)
Golds is a Go 101 project developed by Tapir Liu.
PRs and bug reports are welcome and can be submitted to the issue list.
Please follow @zigo_101 (reachable via the QR code on the left) to get the latest news about Golds.