- Sort Score
- Results per page: 10
- Languages All
Results 1 - 10 of 53 for sizeclass (0.13 sec)
-
src/runtime/mcentral.go
} } // grow allocates a new empty span from the heap and initializes it for c's size class. func (c *mcentral) grow() *mspan { npages := uintptr(class_to_allocnpages[c.spanclass.sizeclass()]) size := uintptr(class_to_size[c.spanclass.sizeclass()]) s := mheap_.alloc(npages, c.spanclass) if s == nil { return nil } // Use division by multiplication and shifts to quickly compute:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 8.1K bytes - Viewed (0) -
src/runtime/mcache.go
// Count up how many slots were used and record it. stats := memstats.heapStats.acquire() slotsUsed := int64(s.allocCount) - int64(s.allocCountBeforeCache) atomic.Xadd64(&stats.smallAllocCount[spc.sizeclass()], slotsUsed) // Flush tinyAllocs. if spc == tinySpanClass { atomic.Xadd64(&stats.tinyAllocCount, int64(c.tinyAllocs)) c.tinyAllocs = 0 } memstats.heapStats.release()
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 10K bytes - Viewed (0) -
src/runtime/mheap.go
// since we're not holding the heap lock. s.spanclass = spanclass if sizeclass := spanclass.sizeclass(); sizeclass == 0 { s.elemsize = nbytes s.nelems = 1 s.divMul = 0 } else { s.elemsize = uintptr(class_to_size[sizeclass]) if !s.spanclass.noscan() && heapBitsInSpan(s.elemsize) { // Reserve space for the pointer/scan bitmap at the end.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 22:31:00 UTC 2024 - 78K bytes - Viewed (0) -
src/runtime/mksizeclasses.go
// so wasted space is at most 12.5%. allocsize := pageSize for allocsize%size > allocsize/8 { allocsize += pageSize } npages := allocsize / pageSize // If the previous sizeclass chose the same // allocation size and fit the same number of // objects into the page, we might as well // use just this size instead of having two // different sizes.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 20:31:27 UTC 2024 - 9.6K bytes - Viewed (0) -
src/runtime/export_test.go
continue } if s.isUnusedUserArenaChunk() { continue } if sizeclass := s.spanclass.sizeclass(); sizeclass == 0 { slow.Mallocs++ slow.Alloc += uint64(s.elemsize) } else { slow.Mallocs += uint64(s.allocCount) slow.Alloc += uint64(s.allocCount) * uint64(s.elemsize) bySize[sizeclass].Mallocs += uint64(s.allocCount) } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 17:50:53 UTC 2024 - 46.1K bytes - Viewed (0) -
src/runtime/malloc.go
if hasHeader { size += mallocHeaderSize } var sizeclass uint8 if size <= smallSizeMax-8 { sizeclass = size_to_class8[divRoundUp(size, smallSizeDiv)] } else { sizeclass = size_to_class128[divRoundUp(size-smallSizeMax, largeSizeDiv)] } size = uintptr(class_to_size[sizeclass]) spc := makeSpanClass(sizeclass, noscan) span = c.alloc[spc] v := nextFreeFast(span)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 59.6K bytes - Viewed (0) -
src/runtime/mgcsweep.go
} lock(&mheap_.lock) mheap_.userArena.quarantineList.remove(s) mheap_.userArena.readyList.insert(s) unlock(&mheap_.lock) }) return false } if spc.sizeclass() != 0 { // Handle spans for small objects. if nfreed > 0 { // Only mark the span as needing zeroing if we've freed any // objects, because a fresh span that had been allocated into,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:52:18 UTC 2024 - 32.9K bytes - Viewed (0) -
src/runtime/mfinal.go
} throw("runtime.SetFinalizer: pointer not in allocated block") } // Move base forward if we've got an allocation header. if !span.spanclass.noscan() && !heapBitsInSpan(span.elemsize) && span.spanclass.sizeclass() != 0 { base += mallocHeaderSize } if uintptr(e.data) != base { // As an implementation detail we allow to set finalizers for an inner byte
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 01:56:56 UTC 2024 - 19K bytes - Viewed (0) -
src/runtime/mbitmap.go
// Handle header-less objects. return typePointers{elem: addr, addr: addr, mask: span.heapBitsSmallForAddr(addr)} } // All of these objects have a header. var typ *_type if spc.sizeclass() != 0 { // Pull the allocation header from the first word of the object. typ = *(**_type)(unsafe.Pointer(addr)) addr += mallocHeaderSize } else { typ = span.largeType if typ == nil {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 00:18:55 UTC 2024 - 60K bytes - Viewed (0) -
src/runtime/mstats.go
// Collect large allocation stats. totalAlloc := consStats.largeAlloc nMalloc := consStats.largeAllocCount totalFree := consStats.largeFree nFree := consStats.largeFreeCount // Collect per-sizeclass stats. var bySize [_NumSizeClasses]struct { Size uint32 Mallocs uint64 Frees uint64 } for i := range bySize { bySize[i].Size = uint32(class_to_size[i]) // Malloc stats.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 08 21:03:13 UTC 2024 - 34.2K bytes - Viewed (0)