Results 11 - 20 of 339 for mspan (0.08 sec)

  1. src/runtime/mcache.go

    		c = pp.mcache
    	}
    	return c
    }
    
    // refill acquires a new span of span class spc for c. This span will
    // have at least one free object. The current span in c must be full.
    //
    // Must run in a non-preemptible context since otherwise the owner of
    // c could change.
    func (c *mcache) refill(spc spanClass) {
    	// Return the current cached span to the central lists.
    	s := c.alloc[spc]
    
    	if s.allocCount != s.nelems {

    - Last Modified: Mon Mar 25 19:53:03 UTC 2024 (10K bytes)
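
    The refill doc comment above describes the contract: the cached span for a size class must be completely full before it is exchanged for a fresh span from the central lists. As a rough illustration of that idea only, here is a deliberately simplified, hypothetical Go sketch; toySpan, toyCache, and centralFull are invented names and do not correspond to the runtime's real data structures.

    package main

    import "fmt"

    // toySpan only echoes the shape of an mspan: how many objects are
    // allocated out of how many slots.
    type toySpan struct {
    	allocCount, nelems int
    }

    // toyCache keeps one cached span per size class, like an mcache.
    type toyCache struct {
    	alloc map[int]*toySpan
    }

    // centralFull stands in for the central span lists.
    var centralFull []*toySpan

    func (c *toyCache) refill(spc int) {
    	if s := c.alloc[spc]; s != nil {
    		if s.allocCount != s.nelems {
    			panic("refill called while the cached span still has free objects")
    		}
    		centralFull = append(centralFull, s) // return the full span
    	}
    	// Install a fresh span that has at least one free object.
    	c.alloc[spc] = &toySpan{nelems: 8}
    }

    func main() {
    	c := &toyCache{alloc: make(map[int]*toySpan)}
    	c.refill(3)
    	fmt.Printf("class 3 span: %d/%d objects used\n", c.alloc[3].allocCount, c.alloc[3].nelems)
    }
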
  2. src/runtime/malloc.go

    		span = c.allocLarge(size, noscan)
    		span.freeindex = 1
    		span.allocCount = 1
    		size = span.elemsize
    		x = unsafe.Pointer(span.base())
    		if needzero && span.needzero != 0 {
    			delayedZeroing = true
    		}
    		if !noscan {
    			// Tell the GC not to look at this yet.
    			span.largeType = nil
    			header = &span.largeType
    		}
    	}

    - Last Modified: Wed May 29 17:58:53 UTC 2024 (59.6K bytes)
  3. src/runtime/export_test.go

    	}
    	return false
    }
    
    // mspan wrapper for testing.
    type MSpan mspan
    
    // Allocate an mspan for testing.
    func AllocMSpan() *MSpan {
    	var s *mspan
    	systemstack(func() {
    		lock(&mheap_.lock)
    		s = (*mspan)(mheap_.spanalloc.alloc())
    		unlock(&mheap_.lock)
    	})
    	return (*MSpan)(s)
    }
    
    // Free an allocated mspan.
    func FreeMSpan(s *MSpan) {
    	systemstack(func() {

    - Last Modified: Thu May 30 17:50:53 UTC 2024 (46.1K bytes)
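
    AllocMSpan and FreeMSpan are exported through export_test.go so that the runtime's external test package can exercise mspan directly. A minimal usage sketch, which compiles only inside the Go runtime's own test suite (the test name here is invented):

    package runtime_test

    import (
    	"runtime"
    	"testing"
    )

    func TestMSpanScratch(t *testing.T) { // hypothetical test name
    	s := runtime.AllocMSpan() // scratch *MSpan from the heap's span allocator
    	defer runtime.FreeMSpan(s)
    	_ = s // exercise the span here
    }
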
  4. src/runtime/metrics/doc.go

    		Memory that is occupied by runtime mcache structures that are
    		currently being used.
    
    	/memory/classes/metadata/mspan/free:bytes
    		Memory that is reserved for runtime mspan structures, but not
    		in-use.
    
    	/memory/classes/metadata/mspan/inuse:bytes
    		Memory that is occupied by runtime mspan structures that are
    		currently being used.
    
    	/memory/classes/metadata/other:bytes

    - Last Modified: Wed May 22 22:58:43 UTC 2024 (20K bytes)
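
    Both mspan metrics documented above can be read from user code with the public runtime/metrics package; a small sketch (values vary from run to run):

    package main

    import (
    	"fmt"
    	"runtime/metrics"
    )

    func main() {
    	samples := []metrics.Sample{
    		{Name: "/memory/classes/metadata/mspan/inuse:bytes"},
    		{Name: "/memory/classes/metadata/mspan/free:bytes"},
    	}
    	metrics.Read(samples)
    	for _, s := range samples {
    		if s.Value.Kind() == metrics.KindUint64 {
    			fmt.Printf("%s = %d bytes\n", s.Name, s.Value.Uint64())
    		}
    	}
    }
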
  5. src/runtime/metrics.go

    				out.scalar = in.sysStats.mCacheInUse
    			},
    		},
    		"/memory/classes/metadata/mspan/free:bytes": {
    			deps: makeStatDepSet(sysStatsDep),
    			compute: func(in *statAggregate, out *metricValue) {
    				out.kind = metricKindUint64
    				out.scalar = in.sysStats.mSpanSys - in.sysStats.mSpanInUse
    			},
    		},
    		"/memory/classes/metadata/mspan/inuse:bytes": {
    			deps: makeStatDepSet(sysStatsDep),

    - Last Modified: Mon Apr 08 21:03:13 UTC 2024 (26K bytes)
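
    The free-bytes computation above (mSpanSys - mSpanInUse) corresponds to the classic runtime.MemStats fields MSpanSys and MSpanInuse. A brief sketch computing the same difference via ReadMemStats:

    package main

    import (
    	"fmt"
    	"runtime"
    )

    func main() {
    	var ms runtime.MemStats
    	runtime.ReadMemStats(&ms)
    	// MSpanSys - MSpanInuse matches /memory/classes/metadata/mspan/free:bytes.
    	fmt.Printf("mspan in use: %d bytes, free: %d bytes\n",
    		ms.MSpanInuse, ms.MSpanSys-ms.MSpanInuse)
    }
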
  6. src/runtime/mgcmark.go

    		}
    
    		// Check if val points to a heap span.
    		span := spanOfHeap(val)
    		if span == nil {
    			continue
    		}
    
    		// Check if val points to an allocated object.
    		idx := span.objIndex(val)
    		if span.isFree(idx) {
    			continue
    		}
    
    		// val points to an allocated object. Mark it.
    		obj := span.base() + idx*span.elemsize
    		greyobject(obj, b, i, span, gcw, idx)
    	}
    }
    
    - Last Modified: Thu Apr 18 21:25:11 UTC 2024 (52.5K bytes)
  7. src/runtime/mgcwork.go

    	const batchSize = 64 // ~1–2 µs per span.
    	lock(&work.wbufSpans.lock)
    	if gcphase != _GCoff || work.wbufSpans.free.isEmpty() {
    		unlock(&work.wbufSpans.lock)
    		return false
    	}
    	systemstack(func() {
    		gp := getg().m.curg
    		for i := 0; i < batchSize && !(preemptible && gp.preempt); i++ {
    			span := work.wbufSpans.free.first
    			if span == nil {
    				break
    			}
    			work.wbufSpans.free.remove(span)

    - Last Modified: Mon Mar 25 19:53:03 UTC 2024 (12.9K bytes)
  8. src/runtime/stack.go

    	if s.state.get() != mSpanManual {
    		throw("freeing stack not in a stack span")
    	}
    	if s.manualFreeList.ptr() == nil {
    		// s will now have a free stack
    		stackpool[order].item.span.insert(s)
    	}
    	x.ptr().next = s.manualFreeList
    	s.manualFreeList = x
    	s.allocCount--
    	if gcphase == _GCoff && s.allocCount == 0 {
    		// Span is completely free. Return it to the heap
    		// immediately if we're sweeping.
    		//

    - Last Modified: Wed May 22 22:31:00 UTC 2024 (41.1K bytes)
  9. src/runtime/mstats.go

    	// regions of memory 8K or larger. A span may be in one of
    	// three states:
    	//
    	// An "idle" span contains no objects or other data. The
    	// physical memory backing an idle span can be released back
    	// to the OS (but the virtual address space never is), or it
    	// can be converted into an "in use" or "stack" span.
    	//
    	// An "in use" span contains at least one heap object and may

    - Last Modified: Mon Apr 08 21:03:13 UTC 2024 (34.2K bytes)
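
    The idle, in-use, and stack span states described in this comment are what the runtime.MemStats heap fields report (HeapIdle, HeapInuse, StackInuse). A short sketch printing those counters:

    package main

    import (
    	"fmt"
    	"runtime"
    )

    func main() {
    	var ms runtime.MemStats
    	runtime.ReadMemStats(&ms)
    	fmt.Printf("in-use spans: %d bytes (HeapInuse)\n", ms.HeapInuse)
    	fmt.Printf("idle spans:   %d bytes (HeapIdle)\n", ms.HeapIdle)
    	fmt.Printf("stack spans:  %d bytes (StackInuse)\n", ms.StackInuse)
    }
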
  10. src/runtime/heapdump.go

    	// bss segment
    	dumpint(tagBSS)
    	dumpint(uint64(firstmoduledata.bss))
    	dumpmemrange(unsafe.Pointer(firstmoduledata.bss), firstmoduledata.ebss-firstmoduledata.bss)
    	dumpfields(firstmoduledata.gcbssmask)
    
    	// mspan.types
    	for _, s := range mheap_.allspans {
    		if s.state.get() == mSpanInUse {
    			// Finalizers
    			for sp := s.specials; sp != nil; sp = sp.next {
    				if sp.kind != _KindSpecialFinalizer {
    					continue
    				}

    - Last Modified: Tue Apr 09 04:07:57 UTC 2024 (17.6K bytes)
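
    heapdump.go implements the runtime side of runtime/debug.WriteHeapDump; the snippet shows it walking mheap_.allspans and dumping finalizer specials for in-use spans. A minimal sketch producing such a dump from user code (the output path is arbitrary):

    package main

    import (
    	"os"
    	"runtime/debug"
    )

    func main() {
    	f, err := os.Create("/tmp/heap.dump") // arbitrary output path
    	if err != nil {
    		panic(err)
    	}
    	defer f.Close()
    	// WriteHeapDump stops the program and writes the heap, including
    	// per-span data, to the given file descriptor.
    	debug.WriteHeapDump(f.Fd())
    }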