Results 1 - 10 of 208 for noscan (0.73 sec)

- src/runtime/msize.go

    // minus any inline space for metadata.
    func roundupsize(size uintptr, noscan bool) (reqSize uintptr) {
        reqSize = size
        if reqSize <= maxSmallSize-mallocHeaderSize {
            // Small object.
            if !noscan && reqSize > minSizeForMallocHeader { // !noscan && !heapBitsInSpan(reqSize)
                reqSize += mallocHeaderSize
            }
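
The roundupsize excerpt above is where noscan matters for small requests: only pointer-bearing (scan) allocations above the inline-bitmap threshold reserve space for a malloc header before the size is rounded to a size class. Below is a rough standalone sketch of that branch; the constants are assumptions for a 64-bit build used purely for illustration, not the runtime's authoritative values.

    package main

    import "fmt"

    // Assumed 64-bit values, for illustration only.
    const (
        assumedMallocHeaderSize       = 8   // one pointer-sized type header
        assumedMinSizeForMallocHeader = 512 // threshold above which a header is used
    )

    // requestedSize mimics the branch in the excerpt: scan objects above the
    // threshold pay for a header; noscan objects never do.
    func requestedSize(size uintptr, noscan bool) uintptr {
        if !noscan && size > assumedMinSizeForMallocHeader {
            size += assumedMallocHeaderSize
        }
        return size // the runtime would then round this up to a size class
    }

    func main() {
        fmt.Println(requestedSize(1024, true))  // 1024: noscan, no header
        fmt.Println(requestedSize(1024, false)) // 1032: scan object reserves a header
    }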

- src/runtime/slice.go

    // For goarch.PtrSize, compiler will optimize division/multiplication into a shift by a constant.
    // For powers of 2, use a variable shift.
    noscan := !et.Pointers()
    switch {
    case et.Size_ == 1:
        lenmem = uintptr(oldLen)
        newlenmem = uintptr(newLen)
        capmem = roundupsize(uintptr(newcap), noscan)
        overflow = uintptr(newcap) > maxAlloc
        newcap = int(capmem)
    case et.Size_ == goarch.PtrSize:
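
Seen from user code, the effect of the growslice excerpt is that append grows a slice's backing array to a malloc size class, so the resulting capacity can exceed what was strictly requested. A small demonstration; the exact capacity is implementation-dependent.

    package main

    import "fmt"

    func main() {
        s := make([]byte, 0, 5)
        s = append(s, make([]byte, 6)...) // forces growth past cap 5
        // len is 6; cap is typically larger than 6 because the new backing
        // array is rounded up to a malloc size class (and []byte is noscan,
        // so no malloc header is involved).
        fmt.Println(len(s), cap(s))
    }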

- src/runtime/malloc.go

        span = c.allocLarge(size, noscan)
        span.freeindex = 1
        span.allocCount = 1
        size = span.elemsize
        x = unsafe.Pointer(span.base())
        if needzero && span.needzero != 0 {
            delayedZeroing = true
        }
        if !noscan {
            // Tell the GC not to look at this yet.
            span.largeType = nil
            header = &span.largeType
        }
    }
    if !noscan && !delayedZeroing {

- src/runtime/mcache.go

        c.scanAlloc = 0
        c.alloc[spc] = s
    }

    // allocLarge allocates a span for a large object.
    func (c *mcache) allocLarge(size uintptr, noscan bool) *mspan {
        if size+_PageSize < size {
            throw("out of memory")
        }
        npages := size >> _PageShift
        if size&_PageMask != 0 {
            npages++
        }
        // Deduct credit for this span allocation and sweep if
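
The page count computed in allocLarge is a plain ceiling division done with a shift and a remainder check. A worked standalone example, assuming the usual 8 KiB runtime page size (the real code uses its own _PageShift/_PageMask constants):

    package main

    import "fmt"

    // Assumed 8 KiB pages, for illustration.
    const (
        pageShift = 13
        pageSize  = 1 << pageShift
        pageMask  = pageSize - 1
    )

    // pagesFor mirrors the npages calculation in the excerpt: shift to count
    // whole pages, then add one if a partial page is left over.
    func pagesFor(size uintptr) uintptr {
        npages := size >> pageShift
        if size&pageMask != 0 {
            npages++
        }
        return npages
    }

    func main() {
        fmt.Println(pagesFor(70000)) // 9: 70000 bytes needs nine 8 KiB pages
        fmt.Println(pagesFor(65536)) // 8: an exact multiple needs no extra page
    }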

- src/runtime/mheap.go

            }
        }
        h.allspans = h.allspans[:len(h.allspans)+1]
        h.allspans[len(h.allspans)-1] = s
    }

    // A spanClass represents the size class and noscan-ness of a span.
    //
    // Each size class has a noscan spanClass and a scan spanClass. The
    // noscan spanClass contains only noscan objects, which do not contain
    // pointers and thus do not need to be scanned by the garbage
    // collector.
    type spanClass uint8

    const (
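
The spanClass comment describes one scan and one noscan class per size class. In the runtime this pair is packed into a single byte, with the size class in the upper bits and the noscan flag in the low bit; the sizeclass() and noscan() accessors seen in the other excerpts unpack it. The sketch below mirrors that encoding as a standalone illustration, not the runtime code itself.

    package main

    import "fmt"

    type spanClass uint8

    // makeSpanClass packs a size class and a noscan flag into one byte,
    // mirroring the scheme described by the mheap.go comment.
    func makeSpanClass(sizeclass uint8, noscan bool) spanClass {
        sc := spanClass(sizeclass << 1)
        if noscan {
            sc |= 1
        }
        return sc
    }

    func (sc spanClass) sizeclass() int8 { return int8(sc >> 1) }
    func (sc spanClass) noscan() bool    { return sc&1 != 0 }

    func main() {
        scan := makeSpanClass(5, false)
        noscan := makeSpanClass(5, true)
        fmt.Println(uint8(scan), scan.sizeclass(), scan.noscan())       // 10 5 false
        fmt.Println(uint8(noscan), noscan.sizeclass(), noscan.noscan()) // 11 5 true
    }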

- src/runtime/arena.go

    //
    // The sweeper handles moving chunks out of this quarantine state to be ready for
    // reuse. When the chunk is placed into the quarantine state, its corresponding
    // span is marked as noscan so that the GC doesn't try to scan memory that would
    // cause a fault.
    //
    // At the next layer are the user arenas themselves. They consist of a single
    // active chunk which new Go values are bump-allocated into and a list of chunks

- src/runtime/mgcmark.go

    s := spanOfUnchecked(b)
    n := s.elemsize
    if n == 0 {
        throw("scanobject n == 0")
    }
    if s.spanclass.noscan() {
        // Correctness-wise this is ok, but it's inefficient
        // if noscan objects reach here.
        throw("scanobject of a noscan object")
    }
    var tp typePointers
    if n > maxObletBytes {
        // Large object. Break into oblets for better
        // parallelism and lower latency.

- src/runtime/mbitmap.go

    //
    //go:nosplit
    func (span *mspan) heapBits() []uintptr {
        const doubleCheck = false
        if doubleCheck && !span.isUserArenaChunk {
            if span.spanclass.noscan() {
                throw("heapBits called for noscan")
            }
            if span.elemsize > minSizeForMallocHeader {
                throw("heapBits called for span class that should have a malloc header")
            }
        }
        // Find the bitmap at the end of the span.
        //

- src/runtime/mwbbuf.go

        // Mark span.
        arena, pageIdx, pageMask := pageIndexOf(span.base())
        if arena.pageMarks[pageIdx]&pageMask == 0 {
            atomic.Or8(&arena.pageMarks[pageIdx], pageMask)
        }
        if span.spanclass.noscan() {
            gcw.bytesMarked += uint64(span.elemsize)
            continue
        }
        ptrs[pos] = obj
        pos++
    }

    // Enqueue the greyed objects.
    gcw.putBatch(ptrs[:pos])
    pp.wbBuf.reset()

- src/runtime/mfinal.go

        if isGoPointerWithoutSpan(e.data) {
            return
        }
        throw("runtime.SetFinalizer: pointer not in allocated block")
    }

    // Move base forward if we've got an allocation header.
    if !span.spanclass.noscan() && !heapBitsInSpan(span.elemsize) && span.spanclass.sizeclass() != 0 {
        base += mallocHeaderSize
    }
    if uintptr(e.data) != base {
        // As an implementation detail we allow to set finalizers for an inner byte
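
The mfinal.go excerpt is part of the argument validation runtime.SetFinalizer performs; it adjusts the object's base address when the span carries an allocation header, which only scan (pointer-bearing) spans can have. For context, a minimal use of the public API looks like this; finalizer timing is up to the garbage collector, so the GC call and sleep below only encourage it to run.

    package main

    import (
        "fmt"
        "runtime"
        "time"
    )

    type resource struct{ name string }

    func main() {
        r := &resource{name: "tmpfile"}
        // The finalizer runs at some point after the GC finds r unreachable.
        runtime.SetFinalizer(r, func(r *resource) {
            fmt.Println("finalizing", r.name)
        })
        r = nil
        runtime.GC()                       // encourage a collection
        time.Sleep(100 * time.Millisecond) // give the finalizer goroutine a chance to run
    }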